From 5275ab531fd716d3dd9732a1fc78599e10e4d846 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 15:59:30 +0100 Subject: [PATCH 01/19] Revert "More additions to the standard library (#18799)" This reverts commit ef97ee2a980e794e7877ead088949dc48afcefbc, reversing changes made to 5454110496c35799743a060bbe9e5e18e9e32398. --- .../dotty/tools/dotc/ast/TreeTypeMap.scala | 3 +- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 12 +- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 1 - .../dotty/tools/dotc/cc/CheckCaptures.scala | 187 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 5 +- .../dotty/tools/dotc/core/Definitions.scala | 2 +- .../dotty/tools/dotc/core/Substituters.scala | 2 +- .../reporting/UniqueMessagePositions.scala | 10 +- .../tools/dotc/transform/CapturedVars.scala | 55 +- .../dotty/tools/dotc/transform/Recheck.scala | 6 +- .../src/dotty/tools/dotc/typer/Namer.scala | 9 +- .../dotty/tools/dotc/typer/RefChecks.scala | 5 +- .../dotty/tools/dotc/CompilationTests.scala | 2 +- .../unchecked/uncheckedCapabilityLeaks.scala | 12 + .../unchecked/uncheckedCaptures.scala | 12 - tests/neg-custom-args/captures/buffers.check | 26 - tests/neg-custom-args/captures/buffers.scala | 30 - tests/neg-custom-args/captures/cc-this.check | 2 +- tests/neg-custom-args/captures/cc-this2.check | 14 +- .../captures/cc-this2/D_2.scala | 2 +- .../captures/exception-definitions.check | 17 +- .../captures/exception-definitions.scala | 4 +- tests/neg-custom-args/captures/filevar.scala | 2 +- .../captures/leaked-curried.check | 11 +- .../captures/leaked-curried.scala | 4 +- tests/neg-custom-args/captures/levels.check | 4 +- .../neg-custom-args/captures/localcaps.check | 12 - .../neg-custom-args/captures/localcaps.scala | 2 +- tests/neg-custom-args/captures/pairs.check | 8 - tests/neg-custom-args/captures/pairs.scala | 4 +- .../recursive-leaking-local-cap.scala | 22 - .../captures/sealed-classes.scala | 21 - .../captures/sealed-leaks.check | 50 - .../captures/sealed-leaks.scala | 32 
+- .../captures/sealed-refs.scala | 42 - tests/neg/class-mods.scala | 2 +- .../captures/sealed-lowerbound.scala | 12 - .../captures/sealed-value-class.scala | 3 - tests/pos-custom-args/captures/steppers.scala | 27 - .../stdlib/collection/ArrayOps.scala | 1664 ----------- .../stdlib/collection/BitSet.scala | 348 --- .../stdlib/collection/BufferedIterator.scala | 32 - .../stdlib/collection/BuildFrom.scala | 128 - .../stdlib/collection/DefaultMap.scala | 21 - .../stdlib/collection/Factory.scala | 798 ------ .../stdlib/collection/Hashing.scala | 63 - .../stdlib/collection/IndexedSeq.scala | 6 +- .../stdlib/collection/IndexedSeqView.scala | 187 -- .../stdlib/collection/Iterable.scala | 8 +- .../stdlib/collection/IterableOnce.scala | 23 +- .../stdlib/collection/Iterator.scala | 20 +- .../stdlib/collection/JavaConverters.scala | 336 --- .../stdlib/collection/LazyZipOps.scala | 423 --- tests/pos-special/stdlib/collection/Map.scala | 21 +- .../stdlib/collection/MapView.scala | 196 -- .../stdlib/collection/Searching.scala | 58 - tests/pos-special/stdlib/collection/Seq.scala | 16 +- .../stdlib/collection/SeqMap.scala | 41 - .../stdlib/collection/SeqView.scala | 232 -- tests/pos-special/stdlib/collection/Set.scala | 271 -- .../stdlib/collection/SortedMap.scala | 222 -- .../stdlib/collection/SortedOps.scala | 91 - .../stdlib/collection/SortedSet.scala | 190 -- .../stdlib/collection/Stepper.scala | 378 --- .../stdlib/collection/StepperShape.scala | 115 - .../collection/StrictOptimizedMapOps.scala | 50 - .../collection/StrictOptimizedSeqOps.scala | 9 +- .../collection/StrictOptimizedSetOps.scala | 30 - .../StrictOptimizedSortedMapOps.scala | 47 - .../StrictOptimizedSortedSetOps.scala | 42 - .../stdlib/collection/StringOps.scala | 2 +- .../stdlib/collection/StringParsers.scala | 320 --- .../pos-special/stdlib/collection/View.scala | 2 +- .../stdlib/collection/WithFilter.scala | 72 - .../stdlib/collection/concurrent/Map.scala | 193 -- .../collection/generic/BitOperations.scala | 51 
- .../generic/DefaultSerializationProxy.scala | 90 - .../collection/generic/IsIterable.scala | 165 -- .../collection/generic/IsIterableOnce.scala | 72 - .../stdlib/collection/generic/IsMap.scala | 115 - .../stdlib/collection/generic/IsSeq.scala | 123 - .../collection/generic/Subtractable.scala | 63 - .../stdlib/collection/generic/package.scala | 35 - .../collection/immutable/ArraySeq.scala | 692 ----- .../stdlib/collection/immutable/BitSet.scala | 376 --- .../collection/immutable/ChampCommon.scala | 253 -- .../stdlib/collection/immutable/HashMap.scala | 2425 ---------------- .../stdlib/collection/immutable/HashSet.scala | 2125 -------------- .../stdlib/collection/immutable/IntMap.scala | 504 ---- .../collection/immutable/Iterable.scala | 2 +- .../immutable/LazyListIterable.scala | 1376 --------- .../stdlib/collection/immutable/ListMap.scala | 373 --- .../stdlib/collection/immutable/ListSet.scala | 140 - .../stdlib/collection/immutable/LongMap.scala | 492 ---- .../stdlib/collection/immutable/Map.scala | 694 ----- .../collection/immutable/NumericRange.scala | 509 ---- .../stdlib/collection/immutable/Queue.scala | 218 -- .../stdlib/collection/immutable/Range.scala | 673 ----- .../collection/immutable/RedBlackTree.scala | 1234 -------- .../stdlib/collection/immutable/Seq.scala | 2 +- .../stdlib/collection/immutable/SeqMap.scala | 278 -- .../stdlib/collection/immutable/Set.scala | 400 --- .../collection/immutable/SortedMap.scala | 178 -- .../collection/immutable/SortedSet.scala | 58 - .../immutable/StrictOptimizedSeqOps.scala | 82 - .../stdlib/collection/immutable/TreeMap.scala | 372 --- .../collection/immutable/TreeSeqMap.scala | 651 ----- .../stdlib/collection/immutable/TreeSet.scala | 297 -- .../stdlib/collection/immutable/Vector.scala | 2476 ----------------- .../collection/immutable/VectorMap.scala | 277 -- .../collection/immutable/WrappedString.scala | 142 - .../stdlib/collection/immutable/package.scala | 29 - .../stdlib/collection/mutable/AnyRefMap.scala | 603 
---- .../collection/mutable/ArrayBuffer.scala | 406 --- .../collection/mutable/ArrayBuilder.scala | 523 ---- .../collection/mutable/ArrayDeque.scala | 646 ----- .../stdlib/collection/mutable/ArraySeq.scala | 351 --- .../stdlib/collection/mutable/BitSet.scala | 393 --- .../stdlib/collection/mutable/Buffer.scala | 5 +- .../mutable/CheckedIndexedSeqView.scala | 120 - .../stdlib/collection/mutable/Cloneable.scala | 22 - .../mutable/CollisionProofHashMap.scala | 889 ------ .../collection/mutable/GrowableBuilder.scala | 37 - .../stdlib/collection/mutable/HashMap.scala | 655 ----- .../stdlib/collection/mutable/HashSet.scala | 457 --- .../stdlib/collection/mutable/HashTable.scala | 418 --- .../collection/mutable/ImmutableBuilder.scala | 32 - .../collection/mutable/IndexedSeq.scala | 84 - .../collection/mutable/LinkedHashMap.scala | 510 ---- .../collection/mutable/LinkedHashSet.scala | 349 --- .../stdlib/collection/mutable/ListMap.scala | 83 - .../stdlib/collection/mutable/LongMap.scala | 674 ----- .../stdlib/collection/mutable/Map.scala | 271 -- .../stdlib/collection/mutable/MultiMap.scala | 116 - .../collection/mutable/OpenHashMap.scala | 307 -- .../collection/mutable/PriorityQueue.scala | 403 --- .../stdlib/collection/mutable/Queue.scala | 139 - .../collection/mutable/RedBlackTree.scala | 653 ----- .../collection/mutable/ReusableBuilder.scala | 56 - .../stdlib/collection/mutable/Set.scala | 123 - .../stdlib/collection/mutable/SortedMap.scala | 104 - .../stdlib/collection/mutable/SortedSet.scala | 49 - .../stdlib/collection/mutable/Stack.scala | 144 - .../collection/mutable/StringBuilder.scala | 2 +- .../stdlib/collection/mutable/TreeMap.scala | 258 -- .../stdlib/collection/mutable/TreeSet.scala | 219 -- .../collection/mutable/UnrolledBuffer.scala | 443 --- .../collection/mutable/WeakHashMap.scala | 56 - .../stdlib/collection/mutable/package.scala | 42 - .../stdlib/collection/package.scala | 81 - 150 files changed, 189 insertions(+), 35907 deletions(-) delete mode 100644 
library/src/scala/annotation/unchecked/uncheckedCaptures.scala delete mode 100644 tests/neg-custom-args/captures/buffers.check delete mode 100644 tests/neg-custom-args/captures/buffers.scala delete mode 100644 tests/neg-custom-args/captures/localcaps.check delete mode 100644 tests/neg-custom-args/captures/recursive-leaking-local-cap.scala delete mode 100644 tests/neg-custom-args/captures/sealed-classes.scala delete mode 100644 tests/neg-custom-args/captures/sealed-leaks.check delete mode 100644 tests/neg-custom-args/captures/sealed-refs.scala delete mode 100644 tests/pos-custom-args/captures/sealed-lowerbound.scala delete mode 100644 tests/pos-custom-args/captures/sealed-value-class.scala delete mode 100644 tests/pos-custom-args/captures/steppers.scala delete mode 100644 tests/pos-special/stdlib/collection/ArrayOps.scala delete mode 100644 tests/pos-special/stdlib/collection/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/BufferedIterator.scala delete mode 100644 tests/pos-special/stdlib/collection/BuildFrom.scala delete mode 100644 tests/pos-special/stdlib/collection/DefaultMap.scala delete mode 100644 tests/pos-special/stdlib/collection/Factory.scala delete mode 100644 tests/pos-special/stdlib/collection/Hashing.scala delete mode 100644 tests/pos-special/stdlib/collection/IndexedSeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/JavaConverters.scala delete mode 100644 tests/pos-special/stdlib/collection/LazyZipOps.scala delete mode 100644 tests/pos-special/stdlib/collection/MapView.scala delete mode 100644 tests/pos-special/stdlib/collection/Searching.scala delete mode 100644 tests/pos-special/stdlib/collection/SeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/SeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/SortedOps.scala delete mode 100644 
tests/pos-special/stdlib/collection/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/Stepper.scala delete mode 100644 tests/pos-special/stdlib/collection/StepperShape.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala delete mode 100644 tests/pos-special/stdlib/collection/StringParsers.scala delete mode 100644 tests/pos-special/stdlib/collection/WithFilter.scala delete mode 100644 tests/pos-special/stdlib/collection/concurrent/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/BitOperations.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsIterable.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsMap.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/IsSeq.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/Subtractable.scala delete mode 100644 tests/pos-special/stdlib/collection/generic/package.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ArraySeq.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ChampCommon.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/HashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/HashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/IntMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala delete mode 100644 
tests/pos-special/stdlib/collection/immutable/ListMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/ListSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/LongMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/NumericRange.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Queue.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Range.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSet.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/Vector.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/VectorMap.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/WrappedString.scala delete mode 100644 tests/pos-special/stdlib/collection/immutable/package.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ArraySeq.scala delete mode 
100644 tests/pos-special/stdlib/collection/mutable/BitSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Cloneable.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/HashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/HashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/HashTable.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ListMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/LongMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Map.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/MultiMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Queue.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Set.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/SortedMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/SortedSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/Stack.scala delete 
mode 100644 tests/pos-special/stdlib/collection/mutable/TreeMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/TreeSet.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala delete mode 100644 tests/pos-special/stdlib/collection/mutable/package.scala delete mode 100644 tests/pos-special/stdlib/collection/package.scala diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index d2e18729836b..955892b2ae22 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -105,8 +105,7 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) - catch case ex: TypeError => super.transform(id) + ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) else super.transform(id) case sel: Select => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 40e94ebde5dd..dccf07ba199e 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -206,12 +206,6 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false - def isSealed(using Context): Boolean = tp match - case tp: TypeParamRef => tp.underlying.isSealed - case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot) - case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag? 
- case _ => false - /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -231,11 +225,7 @@ extension (cls: ClassSymbol) && bc.givenSelfType.dealiasKeepAnnots.match case CapturingType(_, refs) => refs.isAlwaysEmpty case RetainingType(_, refs) => refs.isEmpty - case selfType => - isCaptureChecking // At Setup we have not processed self types yet, so - // unless a self type is explicitly given, we can't tell - // and err on the side of impure. - && selfType.exists && selfType.captureSet.isAlwaysEmpty + case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 7261c760aa01..2586d449dfd4 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -872,7 +872,6 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty - else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a49bd9f79351..fab0689b4df2 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -14,14 +14,14 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} -import util.{SimpleIdentitySet, EqHashMap, 
EqHashSet, SrcPos, Property} +import util.{SimpleIdentitySet, EqHashMap, SrcPos, Property} import transform.SymUtils.* -import transform.{Recheck, PreRecheck, CapturedVars} +import transform.{Recheck, PreRecheck} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.{DefaultGetterName, WildcardParamName} +import NameKinds.DefaultGetterName import reporting.trace /** The capture checker */ @@ -147,49 +147,33 @@ object CheckCaptures: private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: - private val seen = new EqHashSet[TypeRef] - - /** Check that there is at least one method containing carrier and defined - * in the scope of tparam. E.g. this is OK: - * def f[T] = { ... var x: T ... } - * So is this: - * class C[T] { def f() = { class D { var x: T }}} - * But this is not OK: - * class C[T] { object o { var x: T }} - */ extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - carrier.exists && { - val encl = carrier.owner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) - } + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - if !seen.contains(t) then - capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") - seen += t - t.info match - case TypeBounds(_, hi) if !t.isSealed && 
!t.symbol.isParametricIn(carrier) => - if hi.isAny then - val detailStr = - if t eq tp then "variable" - else i"refers to the type variable $t, which" - report.error( - em"""$what cannot $have $tp since - |that type $detailStr is not sealed. - |$addendum""", - pos) - else - traverse(hi) - case _ => - traverseChildren(t) + capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") + t.info match + case TypeBounds(_, hi) + if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) => + if hi.isAny then + report.error( + em"""$what cannot $have $tp since + |that type refers to the type variable $t, which is not sealed. + |$addendum""", + pos) + else + traverse(hi) + case _ => + traverseChildren(t) case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => @@ -276,12 +260,11 @@ class CheckCaptures extends Recheck, SymTransformer: pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, - provenance: => String = "", cs1description: String = "")(using Context) = + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = checkOK( cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" - else i"references $cs1$cs1description are not all", + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not" + else i"references $cs1 are not all", pos, provenance) /** The current environment */ @@ -559,10 +542,10 @@ class CheckCaptures extends Recheck, SymTransformer: val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] - for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do - if formal.isSealed then + for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do + if 
pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" - disallowRootCapabilitiesIn(arg.knownType, NoSymbol, + disallowRootCapabilitiesIn(arg.knownType, fn.symbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) @@ -603,58 +586,13 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = openClosures.tail end recheckClosureBlock - /** Maps mutable variables to the symbols that capture them (in the - * CheckCaptures sense, i.e. symbol is referred to from a different method - * than the one it is defined in). - */ - private val capturedBy = util.HashMap[Symbol, Symbol]() - - /** Maps anonymous functions appearing as function arguments to - * the function that is called. - */ - private val anonFunCallee = util.HashMap[Symbol, Symbol]() - - /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. 
- */ - private def collectCapturedMutVars(using Context) = new TreeTraverser: - def traverse(tree: Tree)(using Context) = tree match - case id: Ident => - val sym = id.symbol - if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then - val enclMeth = ctx.owner.enclosingMethod - if sym.enclosingMethod != enclMeth then - capturedBy(sym) = enclMeth - case Apply(fn, args) => - for case closureDef(mdef) <- args do - anonFunCallee(mdef.symbol) = fn.symbol - traverseChildren(tree) - case Inlined(_, bindings, expansion) => - traverse(bindings) - traverse(expansion) - case mdef: DefDef => - if !mdef.symbol.isInlineMethod then traverseChildren(tree) - case _ => - traverseChildren(tree) - override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - val (carrier, addendum) = capturedBy.get(sym) match - case Some(encl) => - val enclStr = - if encl.isAnonymousFunction then - val location = anonFunCallee.get(encl) match - case Some(meth) if meth.exists => i" argument in a call to $meth" - case _ => "" - s"an anonymous function$location" - else encl.show - (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") - case _ => - (sym, "") - disallowRootCapabilitiesIn( - tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) + disallowRootCapabilitiesIn(tree.tpt.knownType, sym, + i"mutable $sym", "have type", "", sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -742,15 +680,9 @@ class CheckCaptures extends Recheck, SymTransformer: if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) - def selfType = impl.body - .collect: - case TypeDef(tpnme.SELF, rhs) => rhs - .headOption - 
.getOrElse(tree) - .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - selfType.srcPos, cs1description = " captured by this self type") + tree.srcPos) super.recheckClassDef(tree, impl, cls) finally curEnv = saved @@ -1190,8 +1122,6 @@ class CheckCaptures extends Recheck, SymTransformer: override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) - - override def checkInheritedTraitParameters: Boolean = false end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = @@ -1228,12 +1158,11 @@ class CheckCaptures extends Recheck, SymTransformer: private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] override def checkUnit(unit: CompilationUnit)(using Context): Unit = - setup.setupUnit(unit.tpdTree, completeDef) - collectCapturedMutVars.traverse(unit.tpdTree) + setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef) if ctx.settings.YccPrintSetup.value then val echoHeader = "[[syntax tree at end of cc setup]]" - val treeString = show(unit.tpdTree) + val treeString = show(ctx.compilationUnit.tpdTree) report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: @@ -1369,39 +1298,6 @@ class CheckCaptures extends Recheck, SymTransformer: checker.traverse(tree.knownType) end healTypeParam - def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit = - val check = new TypeTraverser: - def traverse(tp: Type) = tp match - case tp: TermRef if tp.isLocalRootCapability => - if tp.localRootOwner == sym then - report.error(i"local root $tp cannot appear in type of $sym", pos) - case tp: ClassInfo => - traverseChildren(tp) - for mbr <- tp.decls do - if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos) - case _ => - traverseChildren(tp) - check.traverse(info) - - def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit = - val check = new TypeTraverser: - def 
traverse(t: Type): Unit = - t match - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => - if !(pos.span.isSynthetic && ctx.reporter.errorsReported) - && !arg.typeSymbol.name.is(WildcardParamName) - then - CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, - "Array", "have element type", - "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", - pos) - traverseChildren(t) - case defn.RefinedFunctionOf(rinfo: MethodType) => - traverse(rinfo) - case _ => - traverseChildren(t) - check.traverse(tp) - /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. @@ -1413,11 +1309,10 @@ class CheckCaptures extends Recheck, SymTransformer: val lctx = tree match case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) case _ => ctx - trace(i"post check $tree"): - traverseChildren(tree)(using lctx) - check(tree) + traverseChildren(tree)(using lctx) + check(tree) def check(tree: Tree)(using Context) = tree match - case TypeApply(fun, args) => + case t @ TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => @@ -1426,10 +1321,6 @@ class CheckCaptures extends Recheck, SymTransformer: checkBounds(normArgs, tl) args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) case _ => - case _: ValOrDefDef | _: TypeDef => - checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) - case tree: TypeTree => - checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 8ba53693870c..68fd79048f41 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ 
b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -522,9 +522,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: tree.symbol match case cls: ClassSymbol => val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic) - && !cls.isPureClass - then + if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then // add capture set to self type of nested classes if no self type is given explicitly. val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls)) val ps1 = inContext(ctx.withOwner(cls)): @@ -707,5 +705,4 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postCheck()(using Context): Unit = for chk <- todoAtPostCheck do chk(ctx) - todoAtPostCheck.clear() end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 40370973ebf0..205d43cd07ca 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1443,7 +1443,7 @@ class Definitions { /** Base classes that are assumed to be pure for the purposes of capture checking. * Every class inheriting from a pure baseclass is pure. 
*/ - @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass) + @tu lazy val pureBaseClasses = Set(defn.ThrowableClass) /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking, */ diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index bd30177adcb4..5a641416b3e1 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -189,7 +189,7 @@ object Substituters: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 71b2636ab8ed..98fd7da3032a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter { || dia.pos.exists && !ctx.settings.YshowSuppressedErrors.value - && (dia.pos.start to dia.pos.end).exists: offset => - positions.get((ctx.source, offset)).exists(_.hides(dia)) + && (dia.pos.start to dia.pos.end).exists(pos => + positions.get((ctx.source, pos)).exists(_.hides(dia))) override def markReported(dia: Diagnostic)(using Context): Unit = if dia.pos.exists then - for offset <- dia.pos.start to dia.pos.end do - positions.get((ctx.source, offset)) match + for (pos <- dia.pos.start to dia.pos.end) + positions.get(ctx.source, pos) match case Some(dia1) if dia1.hides(dia) => - case _ => positions((ctx.source, offset)) = dia + case _ => positions((ctx.source, pos)) = dia 
super.markReported(dia) } diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 202e3d72fa25..a018bbd1a3ac 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -13,20 +13,25 @@ import core.NameKinds.TempResultName import core.Constants._ import util.Store import dotty.tools.uncheckedNN -import ast.tpd.* -import compiletime.uninitialized + +import scala.compiletime.uninitialized /** This phase translates variables that are captured in closures to * heap-allocated refs. */ class CapturedVars extends MiniPhase with IdentityDenotTransformer: thisPhase => + import ast.tpd._ override def phaseName: String = CapturedVars.name override def description: String = CapturedVars.description - private val captured = util.HashSet[Symbol]() + private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized + private def captured(using Context) = ctx.store(Captured) + + override def initContext(ctx: FreshContext): Unit = + Captured = ctx.addLocation(util.ReadOnlySet.empty) private class RefInfo(using Context) { /** The classes for which a Ref type exists. 
*/ @@ -52,10 +57,33 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: myRefInfo.uncheckedNN } - override def prepareForUnit(tree: Tree)(using Context): Context = - captured.clear() - atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree) - ctx + private class CollectCaptured extends TreeTraverser { + private val captured = util.HashSet[Symbol]() + def traverse(tree: Tree)(using Context) = tree match { + case id: Ident => + val sym = id.symbol + if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) { + val enclMeth = ctx.owner.enclosingMethod + if (sym.enclosingMethod != enclMeth) { + report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") + captured += sym + } + } + case _ => + traverseChildren(tree) + } + def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { + traverse(tree) + captured + } + } + + override def prepareForUnit(tree: Tree)(using Context): Context = { + val captured = atPhase(thisPhase) { + CollectCaptured().runOver(ctx.compilationUnit.tpdTree) + } + ctx.fresh.updateStore(Captured, captured) + } /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`, * depending on whether the reference should be @volatile @@ -115,16 +143,3 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: object CapturedVars: val name: String = "capturedVars" val description: String = "represent vars captured by closures as heap objects" - - def collect(captured: util.HashSet[Symbol]): TreeTraverser = new: - def traverse(tree: Tree)(using Context) = tree match - case id: Ident => - val sym = id.symbol - if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then - val enclMeth = ctx.owner.enclosingMethod - if sym.enclosingMethod != enclMeth then - report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") - captured += sym - case _ => - traverseChildren(tree) -end CapturedVars diff --git 
a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index b15a58b98b6f..9833b3cf177f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - atPhase(thisPhase): - withMode(Mode.Printing): - super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) + atPhase(thisPhase) { + super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) + } end Recheck /** A class that can be used to test basic rechecking without any customaization */ diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 5361f37c2a76..7ef552e3661c 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1042,14 +1042,7 @@ class Namer { typer: Typer => tp val rhs1 = typedAheadType(rhs) - val rhsBodyType: TypeBounds = - val bounds = addVariances(rhs1.tpe).toBounds - if sym.is(Sealed) then - sym.resetFlag(Sealed) - bounds.derivedTypeBounds(bounds.lo, - AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span))) - else bounds - + val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType) def opaqueToBounds(info: Type): Type = diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index af279844f370..eef88e76971e 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -267,9 +267,6 @@ object RefChecks { if !other.is(Deferred) then checkOverride(subtypeChecker, dcl, other) end checkAll - - // Disabled 
for capture checking since traits can get different parameter refinements - def checkInheritedTraitParameters: Boolean = true end OverridingPairsChecker /** 1. Check all members of class `clazz` for overriding conditions. @@ -854,7 +851,7 @@ object RefChecks { checkCaseClassInheritanceInvariant() } - if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) { + if (!clazz.is(Trait)) { // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. say we have // diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index fa89c82fc7e7..798e998ef241 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -44,7 +44,7 @@ class CompilationTests { // Run tests for legacy lazy vals compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), - compileDir("tests/pos-special/stdlib", allowDeepSubtypes), + compileDir("tests/pos-special/stdlib", defaultOptions), ) if scala.util.Properties.isJavaAtLeast("16") then diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala index e69de29bb2d1..477ac6d742f7 100644 --- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala +++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala @@ -0,0 +1,12 @@ +package scala.annotation +package unchecked + +/** An annotation for mutable variables that are allowed to capture + * the root capability `cap`. 
Allowing this is not capture safe since + * it can cause leakage of capabilities from local scopes by assigning + * values retaining such capabilties to the annotated variable in + * an outer scope. + */ +class uncheckedCaptures extends StaticAnnotation + + diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala deleted file mode 100644 index 477ac6d742f7..000000000000 --- a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala +++ /dev/null @@ -1,12 +0,0 @@ -package scala.annotation -package unchecked - -/** An annotation for mutable variables that are allowed to capture - * the root capability `cap`. Allowing this is not capture safe since - * it can cause leakage of capabilities from local scopes by assigning - * values retaining such capabilties to the annotated variable in - * an outer scope. - */ -class uncheckedCaptures extends StaticAnnotation - - diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check deleted file mode 100644 index 07acea3c48e3..000000000000 --- a/tests/neg-custom-args/captures/buffers.check +++ /dev/null @@ -1,26 +0,0 @@ --- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ -11 | var elems: Array[A] = new Array[A](10) // error // error - | ^ - | Mutable variable elems cannot have type Array[A] since - | that type refers to the type variable A, which is not sealed. --- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- -16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error - | ^^^^^^^^^^^ - | Sealed type variable A cannot be instantiated to box A^? since - | that type refers to the type variable A, which is not sealed. - | This is often caused by a local capability in an argument of constructor ArrayBuffer - | leaking as part of its result. 
--- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- -11 | var elems: Array[A] = new Array[A](10) // error // error - | ^^^^^^^^ - | Array cannot have element type A since - | that type variable is not sealed. - | Since arrays are mutable, they have to be treated like variables, - | so their element type must be sealed. --- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ -22 | val x: Array[A] = new Array[A](10) // error - | ^^^^^^^^ - | Array cannot have element type A since - | that type variable is not sealed. - | Since arrays are mutable, they have to be treated like variables, - | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala deleted file mode 100644 index 760ddab96ae5..000000000000 --- a/tests/neg-custom-args/captures/buffers.scala +++ /dev/null @@ -1,30 +0,0 @@ -import reflect.ClassTag - -class Buffer[A] - -class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]: - var elems: Array[A] = new Array[A](10) - def add(x: A): this.type = ??? - def at(i: Int): A = ??? - -class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: - var elems: Array[A] = new Array[A](10) // error // error - def add(x: A): this.type = ??? - def at(i: Int): A = ??? 
- -object ArrayBuffer: - def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error - elems = xs.toArray - def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer: - elems = xs.toArray // ok - -class EncapsArray[A: ClassTag]: - val x: Array[A] = new Array[A](10) // error - - - - - - - - diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 070e815d6d45..335302c5c259 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -12,4 +12,4 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3 + | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index bd9a1085d262..5e43a45b67f5 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,12 +1,6 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- -3 | this: D^ => // error - | ^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- 2 |class D extends C: // error - | ^ - | illegal inheritance: self type D^ of class D does not conform to self type C - | of parent class C - | - | longer explanation available when 
compiling with `-explain` + |^ + |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C +3 | this: D^ => diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index de1a722f73a9..b22e5e456092 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => // error + this: D^ => diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 72b88f252e59..16d623e64f7c 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,12 +1,13 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- -3 | self: Err^ => // error - | ^^^^ - |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- +2 |class Err extends Exception: // error + |^ + |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable +3 | self: Err^ => -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable +-- Error: 
tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^ - | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index fbc9f3fd1d33..a19b751825b8 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,6 +1,6 @@ -class Err extends Exception: - self: Err^ => // error +class Err extends Exception: // error + self: Err^ => def test(c: Any^) = class Err2 extends Exception: diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index 34588617c0b8..c8280e2ff3b7 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,7 +5,7 @@ class File: def write(x: String): Unit = ??? 
class Service: - var file: File^{cap[Service]} = uninitialized // error + var file: File^{cap[Service]} = uninitialized def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check index 3f0a9800a4ec..c23d1516acf5 100644 --- a/tests/neg-custom-args/captures/leaked-curried.check +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -2,7 +2,10 @@ 14 | () => () => io // error | ^^ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz --- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- -17 | () => () => io // error - | ^^ - |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 ------------------------------- +15 | class Foo extends Box, Pure: // error + | ^ + | illegal inheritance: self type Foo^{io} of class Foo does not conform to self type Pure + | of parent trait Pure + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala index f9238259e065..a7c48219b450 100644 --- a/tests/neg-custom-args/captures/leaked-curried.scala +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -12,8 +12,8 @@ def main(): Unit = self => val get: () ->{} () ->{io} Cap^ = () => () => io // error - class Foo extends Box, Pure: + class Foo extends Box, Pure: // error val get: () ->{} () ->{io} Cap^ = - () => () => io // error + () => () => io new Foo val bad = leaked.get()().use() // using a leaked capability diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index 
c0cc7f0a759c..f91f90fb652f 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,8 +1,8 @@ -- Error: tests/neg-custom-args/captures/levels.scala:6:16 ------------------------------------------------------------- 6 | private var v: T = init // error | ^ - | Mutable variable v cannot have type T since - | that type variable is not sealed. + | mutable variable v cannot have type T since + | that type refers to the type variable T, which is not sealed. -- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ 17 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check deleted file mode 100644 index b09702749d10..000000000000 --- a/tests/neg-custom-args/captures/localcaps.check +++ /dev/null @@ -1,12 +0,0 @@ --- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ---------------------------------------------------------- -4 | def x: C^{cap[d]} = ??? // error - | ^^^^^^ - | `d` does not name an outer definition that represents a capture level --- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ---------------------------------------------------------- -9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error - | ^^^^^^^ - | `z2` does not name an outer definition that represents a capture level --- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 ----------------------------------------------------------- -6 | def y: C^{cap[C]} = ??? 
// error - | ^ - | local root (cap[C] : caps.Cap) cannot appear in type of class C diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala index 049a1ee0d775..f5227bfef96b 100644 --- a/tests/neg-custom-args/captures/localcaps.scala +++ b/tests/neg-custom-args/captures/localcaps.scala @@ -3,7 +3,7 @@ class C: def x: C^{cap[d]} = ??? // error - def y: C^{cap[C]} = ??? // error + def y: C^{cap[C]} = ??? // ok private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check index 9d1b3a76e164..38712469879f 100644 --- a/tests/neg-custom-args/captures/pairs.check +++ b/tests/neg-custom-args/captures/pairs.check @@ -12,11 +12,3 @@ | Required: Cap^ ->{d} Unit | | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/pairs.scala:6:8 --------------------------------------------------------------- -6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error - | ^ - | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair --- Error: tests/neg-custom-args/captures/pairs.scala:7:8 --------------------------------------------------------------- -7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error - | ^ - | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala index 99b27639f729..4fc495d60f95 100644 --- a/tests/neg-custom-args/captures/pairs.scala +++ b/tests/neg-custom-args/captures/pairs.scala @@ -3,8 +3,8 @@ object Monomorphic2: class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap^{cap[Pair]} ->{x} Unit = x // error - def snd: Cap^{cap[Pair]} ->{y} Unit = y // error + def fst: Cap^{cap[Pair]} ->{x} Unit = x + def snd: Cap^{cap[Pair]} ->{y} Unit = y def test(c: Cap, d: Cap) 
= def f(x: Cap): Unit = if c == x then () diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala deleted file mode 100644 index 0daecafbf9d0..000000000000 --- a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala +++ /dev/null @@ -1,22 +0,0 @@ -import language.experimental.captureChecking -trait Cap: - def use: Int = 42 - -def usingCap[sealed T](op: Cap^ => T): T = ??? - -def badTest(): Unit = - def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error - if b then c - else - val leaked = usingCap[Cap^{cap[bad]}](bad(true)) - leaked.use // boom - c - - usingCap[Unit]: c0 => - bad(false)(c0) - -class Bad: - def foo: Cap^{cap[Bad]} = ??? // error - private def bar: Cap^{cap[Bad]} = ??? // ok - - diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala deleted file mode 100644 index b8cb0acbf5c5..000000000000 --- a/tests/neg-custom-args/captures/sealed-classes.scala +++ /dev/null @@ -1,21 +0,0 @@ -abstract class C1[A1]: - def set(x: A1): Unit - def get: A1 - -trait Co[+A]: - def get: A - -class C2[sealed A2] extends C1[A2], Co[A2]: // ok - private var x: A2 = ??? 
- def set(x: A2): Unit = - this.x = x - def get: A2 = x - -class C3[A3] extends C2[A3] // error - -abstract class C4[sealed A4] extends Co[A4] // ok - -abstract class C5[sealed +A5] extends Co[A5] // ok - -abstract class C6[A6] extends C5[A6] // error - diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check deleted file mode 100644 index f7098eba32b6..000000000000 --- a/tests/neg-custom-args/captures/sealed-leaks.check +++ /dev/null @@ -1,50 +0,0 @@ --- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------ -31 | () - | ^^ - | A pure expression does nothing in statement position - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------ -12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Sealed type variable T cannot be instantiated to (() => Unit) | Null since - | that type captures the root capability `cap`. - | This is often caused by a local capability in an argument of method usingLogFile - | leaking as part of its result. --- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 --------------------------------- -19 | usingLogFile { f => x = f } // error - | ^ - | Found: (f : java.io.FileOutputStream^) - | Required: (java.io.FileOutputStream | Null)^{cap[Test2]} - | - | longer explanation available when compiling with `-explain` --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------ -30 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. 
--- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 ------------------------------------------------------- -39 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. - | - | Note that variable x does not count as local since it is captured by an anonymous function --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 ------------------------------------------------------- -43 | var x: T = y // error - | ^ - |Mutable variable x cannot have type T since - |that type variable is not sealed. - | - |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 ------------------------------------------------------- -47 | var x: T = y // error - | ^ - | Mutable variable x cannot have type T since - | that type variable is not sealed. - | - | Note that variable x does not count as local since it is captured by method foo --- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------ -11 | val later = usingLogFile { f => () => f.write(0) } // error - | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala index 2555ba8a3e07..a7acf77b5678 100644 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -18,34 +18,4 @@ def Test2 = usingLogFile { f => x = f } // error - later() - -def Test3 = - def f[T](y: T) = - var x: T = y - () - - class C[T](y: T): - object o: - var x: T = y // error - () - - class C2[T](y: T): - def f = - var x: T = y // ok - () - - def g1[T](y: T): T => Unit = - var x: T = y // error - y => x = y - - def g2[T](y: T): T => Unit = - var x: T = y // error - 
identity(y => x = y) - - def g3[T](y: T): Unit = - var x: T = y // error - def foo = - x = y - () - + later() \ No newline at end of file diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala deleted file mode 100644 index 05fa483acf28..000000000000 --- a/tests/neg-custom-args/captures/sealed-refs.scala +++ /dev/null @@ -1,42 +0,0 @@ -class Ref[sealed A](init: A): - this: Ref[A]^ => - private var x: A = init - def get: A = x - def set(x: A): Unit = this.x = x - -class It[X]: - this: It[X]^ => - -def f1[B1](x: B1, next: B1 -> B1) = - var r = x // ok - r = next(x) - r - -def f2[B2](x: B2, next: B2 -> B2) = - val r = Ref[B2](x) // error - r.set(next(x)) - r.get - -def g[sealed B](x: B, next: B -> B) = - val r = Ref[B](x) // ok - r.set(next(x)) - r.get - -import annotation.unchecked.uncheckedCaptures - -def h[B](x: B, next: B -> B) = - val r = Ref[B @uncheckedCaptures](x) // ok - r.set(next(x)) - r.get - -def f3[B](x: B, next: B -> B) = - val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error - r.set(next(x)) - val y = r.get - () - -def f4[B](x: B, next: B -> B) = - val r: Ref[B]^{cap[f4]} = Ref[B](x) // error - r.set(next(x)) - val y = r.get - () \ No newline at end of file diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index cf4348ad42d7..60e9fb279364 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -type T2 // ok +sealed type T2 // error abstract type T3 // error abstract open type T4 // error diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala deleted file mode 100644 index e848f784cddc..000000000000 --- a/tests/pos-custom-args/captures/sealed-lowerbound.scala +++ /dev/null @@ -1,12 +0,0 @@ -def foo[sealed B](x: B): B = x - -def bar[B, sealed A >: B](x: A): A = foo[A](x) - -class C[sealed A] 
- -class CV[sealed A](x: Int): - def this() = this: - val x = new C[A]: - println("foo") - 0 - diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala deleted file mode 100644 index b5f25bf2d203..000000000000 --- a/tests/pos-custom-args/captures/sealed-value-class.scala +++ /dev/null @@ -1,3 +0,0 @@ -class Ops[sealed A](xs: Array[A]) extends AnyVal: - - def f(p: A => Boolean): Array[A] = xs diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala deleted file mode 100644 index 815ac938b492..000000000000 --- a/tests/pos-custom-args/captures/steppers.scala +++ /dev/null @@ -1,27 +0,0 @@ - -trait Stepper[+A]: - this: Stepper[A]^ => - -object Stepper: - trait EfficientSplit - -sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure - -trait IterableOnce[+A] extends Any: - this: IterableOnce[A]^ => - def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ??? - -sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure: - def array: Array[_] - - def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]] - ArraySeq.make(arr).asInstanceOf[ArraySeq[T]] - -object ArraySeq: - - def make[sealed T](x: Array[T]): ArraySeq[T] = ??? - - final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure: - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ??? - diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala deleted file mode 100644 index a52fd0dbd162..000000000000 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ /dev/null @@ -1,1664 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import java.lang.Math.{max, min} -import java.util.Arrays -import language.experimental.captureChecking - -import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally - genericArrayOps => _, - booleanArrayOps => _, - byteArrayOps => _, - charArrayOps => _, - doubleArrayOps => _, - floatArrayOps => _, - intArrayOps => _, - longArrayOps => _, - refArrayOps => _, - shortArrayOps => _, - unitArrayOps => _, - genericWrapArray => _, - wrapRefArray => _, - wrapIntArray => _, - wrapDoubleArray => _, - wrapLongArray => _, - wrapFloatArray => _, - wrapCharArray => _, - wrapByteArray => _, - wrapShortArray => _, - wrapBooleanArray => _, - wrapUnitArray => _, - wrapString => _, - copyArrayToImmutableIndexedSeq => _, - _ -} -import scala.collection.Stepper.EfficientSplit -import scala.collection.immutable.Range -import scala.collection.mutable.ArrayBuilder -import scala.math.Ordering -import scala.reflect.ClassTag -import scala.util.Sorting - -object ArrayOps { - - @SerialVersionUID(3L) - private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] { - def length = xs.length - def apply(n: Int) = xs(n) - override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") - } - - /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ - class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) { - - /** Apply `f` to each element for its side effects. - * Note: [U] parameter needed to help scalac's type inference. 
- */ - def foreach[U](f: A => U): Unit = { - val len = xs.length - var i = 0 - while(i < len) { - val x = xs(i) - if(p(x)) f(x) - i += 1 - } - } - - /** Builds a new array by applying a function to all elements of this array. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given function - * `f` to each element of this array and collecting the results. - */ - def map[sealed B: ClassTag](f: A => B): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while (i < xs.length) { - val x = xs(i) - if(p(x)) b += f(x) - i = i + 1 - } - b.result() - } - - /** Builds a new array by applying a function to all elements of this array - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given collection-valued function - * `f` to each element of this array and concatenating the results. - */ - def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while(i < xs.length) { - val x = xs(i) - if(p(x)) b ++= f(xs(i)) - i += 1 - } - b.result() - } - - def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = - flatMap[B](x => asIterable(f(x))) - - /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ - def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) - } - - @SerialVersionUID(3L) - private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = 0 - private[this] val len = xs.length - override def knownSize: Int = len - pos - def hasNext: Boolean = pos < len - def next(): A = { - if (pos >= xs.length) Iterator.empty.next() - val r = xs(pos) - pos += 1 - r - } - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - val newPos = pos + n - pos = - if (newPos < 0 /* overflow */) len - else Math.min(len, newPos) - } - this - } - } - - @SerialVersionUID(3L) - private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = xs.length-1 - def hasNext: Boolean = pos >= 0 - def next(): A = { - if (pos < 0) Iterator.empty.next() - val r = xs(pos) - pos -= 1 - r - } - - override def drop(n: Int): Iterator[A] = { - if (n > 0) pos = Math.max( -1, pos - n) - this - } - } - - @SerialVersionUID(3L) - private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { - private[this] var pos = 0 - def hasNext: Boolean = pos < xs.length - def next(): Array[A] = { - if(pos >= xs.length) throw new NoSuchElementException - val r = new ArrayOps(xs).slice(pos, pos+groupSize) - pos += groupSize - r - } - } - - /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to - * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. - */ - private final val MaxStableSortLength = 300 - - /** Avoid an allocation in [[collect]]. 
*/ - private val fallback: Any => Any = _ => fallback -} - -/** This class serves as a wrapper for `Array`s with many of the operations found in - * indexed sequences. Where needed, instances of arrays are implicitly converted - * into this class. There is generally no reason to create an instance explicitly or use - * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on - * the implicit conversion to `ArrayOps` when calling a method (which does not actually - * allocate an instance of `ArrayOps` because it is a value class). - * - * Neither `Array` nor `ArrayOps` are proper collection types - * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and - * `immutable.ArraySeq` serve this purpose. - * - * The difference between this class and `ArraySeq`s is that calling transformer methods such as - * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. - * - * @tparam A type of the elements contained in this array. - */ -final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal { - - @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - - /** The size of this array. - * - * @return the number of elements in this array. - */ - @`inline` def size: Int = xs.length - - /** The size of this array. - * - * @return the number of elements in this array. - */ - @`inline` def knownSize: Int = xs.length - - /** Tests whether the array is empty. - * - * @return `true` if the array contains no elements, `false` otherwise. - */ - @`inline` def isEmpty: Boolean = xs.length == 0 - - /** Tests whether the array is not empty. - * - * @return `true` if the array contains at least one element, `false` otherwise. - */ - @`inline` def nonEmpty: Boolean = xs.length != 0 - - /** Selects the first element of this array. - * - * @return the first element of this array. - * @throws NoSuchElementException if the array is empty. 
- */ - def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") - - /** Selects the last element. - * - * @return The last element of this array. - * @throws NoSuchElementException If the array is empty. - */ - def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") - - /** Optionally selects the first element. - * - * @return the first element of this array if it is nonempty, - * `None` if it is empty. - */ - def headOption: Option[A] = if(isEmpty) None else Some(head) - - /** Optionally selects the last element. - * - * @return the last element of this array$ if it is nonempty, - * `None` if it is empty. - */ - def lastOption: Option[A] = if(isEmpty) None else Some(last) - - /** Compares the size of this array to a test value. - * - * @param otherSize the test value that gets compared with the size. - * @return A value `x` where - * {{{ - * x < 0 if this.size < otherSize - * x == 0 if this.size == otherSize - * x > 0 if this.size > otherSize - * }}} - */ - def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) - - /** Compares the length of this array to a test value. - * - * @param len the test value that gets compared with the length. - * @return A value `x` where - * {{{ - * x < 0 if this.length < len - * x == 0 if this.length == len - * x > 0 if this.length > len - * }}} - */ - def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) - - /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` - * because `size` is known and comparison is constant-time. 
- * - * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and - * allow the following more readable usages: - * - * {{{ - * this.sizeIs < size // this.sizeCompare(size) < 0 - * this.sizeIs <= size // this.sizeCompare(size) <= 0 - * this.sizeIs == size // this.sizeCompare(size) == 0 - * this.sizeIs != size // this.sizeCompare(size) != 0 - * this.sizeIs >= size // this.sizeCompare(size) >= 0 - * this.sizeIs > size // this.sizeCompare(size) > 0 - * }}} - */ - def sizeIs: Int = xs.length - - /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` - * because `length` is known and comparison is constant-time. - * - * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and - * allow the following more readable usages: - * - * {{{ - * this.lengthIs < len // this.lengthCompare(len) < 0 - * this.lengthIs <= len // this.lengthCompare(len) <= 0 - * this.lengthIs == len // this.lengthCompare(len) == 0 - * this.lengthIs != len // this.lengthCompare(len) != 0 - * this.lengthIs >= len // this.lengthCompare(len) >= 0 - * this.lengthIs > len // this.lengthCompare(len) > 0 - * }}} - */ - def lengthIs: Int = xs.length - - /** Selects an interval of elements. The returned array is made up - * of all elements `x` which satisfy the invariant: - * {{{ - * from <= indexOf(x) < until - * }}} - * - * @param from the lowest index to include from this array. - * @param until the lowest index to EXCLUDE from this array. - * @return an array containing the elements greater than or equal to - * index `from` extending up to (but not including) index `until` - * of this array. 
- */ - def slice(from: Int, until: Int): Array[A] = { - import java.util.Arrays.copyOfRange - val lo = max(from, 0) - val hi = min(until, xs.length) - if (hi > lo) { - (((xs: Array[_]): @unchecked) match { - case x: Array[AnyRef] => copyOfRange(x, lo, hi) - case x: Array[Int] => copyOfRange(x, lo, hi) - case x: Array[Double] => copyOfRange(x, lo, hi) - case x: Array[Long] => copyOfRange(x, lo, hi) - case x: Array[Float] => copyOfRange(x, lo, hi) - case x: Array[Char] => copyOfRange(x, lo, hi) - case x: Array[Byte] => copyOfRange(x, lo, hi) - case x: Array[Short] => copyOfRange(x, lo, hi) - case x: Array[Boolean] => copyOfRange(x, lo, hi) - }).asInstanceOf[Array[A]] - } else new Array[A](0) - } - - /** The rest of the array without its first element. */ - def tail: Array[A] = - if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) - - /** The initial part of the array without its last element. */ - def init: Array[A] = - if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) - - /** Iterates over the tails of this array. The first value will be this - * array and the final one will be an empty array, with the intervening - * values the results of successive applications of `tail`. - * - * @return an iterator over all the tails of this array - */ - def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) - - /** Iterates over the inits of this array. The first value will be this - * array and the final one will be an empty array, with the intervening - * values the results of successive applications of `init`. - * - * @return an iterator over all the inits of this array - */ - def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) - - // A helper for tails and inits. 
- private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = - Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) - - /** An array containing the first `n` elements of this array. */ - def take(n: Int): Array[A] = slice(0, n) - - /** The rest of the array without its `n` first elements. */ - def drop(n: Int): Array[A] = slice(n, xs.length) - - /** An array containing the last `n` elements of this array. */ - def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) - - /** The rest of the array without its `n` last elements. */ - def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) - - /** Takes longest prefix of elements that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return the longest prefix of this array whose elements all satisfy - * the predicate `p`. - */ - def takeWhile(p: A => Boolean): Array[A] = { - val i = indexWhere(x => !p(x)) - val hi = if(i < 0) xs.length else i - slice(0, hi) - } - - /** Drops longest prefix of elements that satisfy a predicate. - * - * @param p The predicate used to test elements. - * @return the longest suffix of this array whose first element - * does not satisfy the predicate `p`. 
- */ - def dropWhile(p: A => Boolean): Array[A] = { - val i = indexWhere(x => !p(x)) - val lo = if(i < 0) xs.length else i - slice(lo, xs.length) - } - - def iterator: Iterator[A] = - ((xs: Any @unchecked) match { - case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) - case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) - case null => throw new NullPointerException - }).asInstanceOf[Iterator[A]] - - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = (shape.shape: @unchecked) match { - case StepperShape.ReferenceShape => (xs: Any) match { - case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) - case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) - } - case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) - case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) - case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) - case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) - case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) - case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) - case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) - } 
- s.asInstanceOf[S with EfficientSplit] - } - - /** Partitions elements in fixed size arrays. - * @see [[scala.collection.Iterator]], method `grouped` - * - * @param size the number of elements per group - * @return An iterator producing arrays of size `size`, except the - * last will be less than size `size` if the elements don't divide evenly. - */ - def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) - - /** Splits this array into a prefix/suffix pair according to a predicate. - * - * Note: `c span p` is equivalent to (but more efficient than) - * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the - * predicate `p` does not cause any side-effects. - * - * @param p the test predicate - * @return a pair consisting of the longest prefix of this array whose - * elements all satisfy `p`, and the rest of this array. - */ - def span(p: A => Boolean): (Array[A], Array[A]) = { - val i = indexWhere(x => !p(x)) - val idx = if(i < 0) xs.length else i - (slice(0, idx), slice(idx, xs.length)) - } - - /** Splits this array into two at a given position. - * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. - * - * @param n the position at which to split. - * @return a pair of arrays consisting of the first `n` - * elements of this array, and the other elements. - */ - def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) - - /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ - def partition(p: A => Boolean): (Array[A], Array[A]) = { - val res1, res2 = ArrayBuilder.make[A] - var i = 0 - while(i < xs.length) { - val x = xs(i) - (if(p(x)) res1 else res2) += x - i += 1 - } - (res1.result(), res2.result()) - } - - /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one - * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second - * one made of those wrapped in [[scala.util.Right]]. 
- * - * Example: - * {{{ - * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { - * case i: Int => Left(i) - * case s: String => Right(s) - * } - * // xs == (Array(1, 2, 3), - * // Array(one, two, three)) - * }}} - * - * @tparam A1 the element type of the first resulting collection - * @tparam A2 the element type of the second resulting collection - * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] - * - * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], - * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { - val res1 = ArrayBuilder.make[A1] - val res2 = ArrayBuilder.make[A2] - var i = 0 - while(i < xs.length) { - f(xs(i)) match { - case Left(x) => res1 += x - case Right(x) => res2 += x - } - i += 1 - } - (res1.result(), res2.result()) - } - - /** Returns a new array with the elements in reversed order. */ - @inline def reverse: Array[A] = { - val len = xs.length - val res = new Array[A](len) - var i = 0 - while(i < len) { - res(len-i-1) = xs(i) - i += 1 - } - res - } - - /** An iterator yielding elements in reversed order. - * - * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
- * - * @return an iterator yielding the elements of this array in reversed order - */ - def reverseIterator: Iterator[A] = - ((xs: Any @unchecked) match { - case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) - case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) - case null => throw new NullPointerException - }).asInstanceOf[Iterator[A]] - - /** Selects all elements of this array which satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. - */ - def filter(p: A => Boolean): Array[A] = { - val res = ArrayBuilder.make[A] - var i = 0 - while(i < xs.length) { - val x = xs(i) - if(p(x)) res += x - i += 1 - } - res.result() - } - - /** Selects all elements of this array which do not satisfy a predicate. - * - * @param p the predicate used to test elements. - * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. - */ - def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) - - /** Sorts this array according to an Ordering. - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @see [[scala.math.Ordering]] - * - * @param ord the ordering to be used to compare elements. - * @return an array consisting of the elements of this array - * sorted according to the ordering `ord`. 
- */ - def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { - val len = xs.length - def boxed = if(len < ArrayOps.MaxStableSortLength) { - val a = xs.clone() - Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) - a - } else { - val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) - Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) - Array.copyAs[A](a, len) - } - if(len <= 1) xs.clone() - else ((xs: Array[_]) match { - case xs: Array[AnyRef] => - val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a - case xs: Array[Int] => - if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Long] => - if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Char] => - if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Byte] => - if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Short] => - if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } - else boxed - case xs: Array[Boolean] => - if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } - else boxed - case xs => boxed - }).asInstanceOf[Array[A]] - } - - /** Sorts this array according to a comparison function. - * - * The sort is stable. That is, elements that are equal (as determined by - * `lt`) appear in the same order in the sorted sequence as in the original. - * - * @param lt the comparison function which tests whether - * its first argument precedes its second argument in - * the desired ordering. - * @return an array consisting of the elements of this array - * sorted according to the comparison function `lt`. 
- */ - def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) - - /** Sorts this array according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * - * @see [[scala.math.Ordering]] - * @param f the transformation function mapping elements - * to some other domain `B`. - * @param ord the ordering assumed on domain `B`. - * @tparam B the target type of the transformation `f`, and the type where - * the ordering `ord` is defined. - * @return an array consisting of the elements of this array - * sorted according to the ordering where `x < y` if - * `ord.lt(f(x), f(y))`. - */ - def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) - - /** Creates a non-strict filter of this array. - * - * Note: the difference between `c filter p` and `c withFilter p` is that - * the former creates a new array, whereas the latter only - * restricts the domain of subsequent `map`, `flatMap`, `foreach`, - * and `withFilter` operations. - * - * @param p the predicate used to test elements. - * @return an object of class `ArrayOps.WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this array - * which satisfy the predicate `p`. - */ - def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) - - /** Finds index of first occurrence of some value in this array after or at some start index. - * - * @param elem the element value to search for. - * @param from the start index - * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. 
- */ - def indexOf(elem: A, from: Int = 0): Int = { - var i = from - while(i < xs.length) { - if(elem == xs(i)) return i - i += 1 - } - -1 - } - - /** Finds index of the first element satisfying some predicate after or at some start index. - * - * @param p the predicate used to test elements. - * @param from the start index - * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { - var i = from - while(i < xs.length) { - if(p(xs(i))) return i - i += 1 - } - -1 - } - - /** Finds index of last occurrence of some value in this array before or at a given end index. - * - * @param elem the element value to search for. - * @param end the end index. - * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) - * to `elem`, or `-1`, if none exists. - */ - def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { - var i = min(end, xs.length-1) - while(i >= 0) { - if(elem == xs(i)) return i - i -= 1 - } - -1 - } - - /** Finds index of last element satisfying some predicate before or at given end index. - * - * @param p the predicate used to test elements. - * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, - * or `-1`, if none exists. - */ - def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { - var i = min(end, xs.length-1) - while(i >= 0) { - if(p(xs(i))) return i - i -= 1 - } - -1 - } - - /** Finds the first element of the array satisfying a predicate, if any. - * - * @param p the predicate used to test elements. - * @return an option value containing the first element in the array - * that satisfies `p`, or `None` if none exists. 
- */ - def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { - val idx = indexWhere(p) - if(idx == -1) None else Some(xs(idx)) - } - - /** Tests whether a predicate holds for at least one element of this array. - * - * @param p the predicate used to test elements. - * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` - */ - def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 - - /** Tests whether a predicate holds for all elements of this array. - * - * @param p the predicate used to test elements. - * @return `true` if this array is empty or the given predicate `p` - * holds for all elements of this array, otherwise `false`. - */ - def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { - var i = 0 - while(i < xs.length) { - if(!p(xs(i))) return false - i += 1 - } - true - } - - /** Applies a binary operator to a start value and all elements of this array, - * going left to right. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this array, - * going left to right with the start value `z` on the left: - * {{{ - * op(...op(z, x_1), x_2, ..., x_n) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this array. - * Returns `z` if this array is empty. 
- */ - def foldLeft[B](z: B)(op: (B, A) => B): B = { - def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { - val length = xs.length - var v: Any = z - var i = 0 - while(i < length) { - v = op(v, xs(i)) - i += 1 - } - v - } - ((xs: Any @unchecked) match { - case null => throw new NullPointerException // null-check first helps static analysis of instanceOf - case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - }).asInstanceOf[B] - } - - /** Produces an array containing cumulative results of applying the binary - * operator going left to right. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return array with intermediate values. - * - * Example: - * {{{ - * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) - * }}} - * - */ - def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { - var v = z - var i = 0 - val res = new Array[B](xs.length + 1) - while(i < xs.length) { - res(i) = v - v = op(v, xs(i)) - i += 1 - } - res(i) = v - res - } - - /** Computes a prefix scan of the elements of the array. - * - * Note: The neutral element `z` may be applied more than once. 
- * - * @tparam B element type of the resulting array - * @param z neutral element for the operator `op` - * @param op the associative operator for the scan - * - * @return a new array containing the prefix scan of the elements in this array - */ - def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) - - /** Produces an array containing cumulative results of applying the binary - * operator going right to left. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return array with intermediate values. - * - * Example: - * {{{ - * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) - * }}} - * - */ - def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { - var v = z - var i = xs.length - 1 - val res = new Array[B](xs.length + 1) - res(xs.length) = z - while(i >= 0) { - v = op(xs(i), v) - res(i) = v - i -= 1 - } - res - } - - /** Applies a binary operator to all elements of this array and a start value, - * going right to left. - * - * @param z the start value. - * @param op the binary operator. - * @tparam B the result type of the binary operator. - * @return the result of inserting `op` between consecutive elements of this array, - * going right to left with the start value `z` on the right: - * {{{ - * op(x_1, op(x_2, ... op(x_n, z)...)) - * }}} - * where `x,,1,,, ..., x,,n,,` are the elements of this array. - * Returns `z` if this array is empty. 
- */ - def foldRight[B](z: B)(op: (A, B) => B): B = { - def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { - var v = z - var i = xs.length - 1 - while(i >= 0) { - v = op(xs(i), v) - i -= 1 - } - v - } - ((xs: Any @unchecked) match { - case null => throw new NullPointerException - case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) - }).asInstanceOf[B] - - } - - /** Folds the elements of this array using the specified associative binary operator. - * - * @tparam A1 a type parameter for the binary operator, a supertype of `A`. - * @param z a neutral element for the fold operation; may be added to the result - * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, - * 0 for addition, or 1 for multiplication). - * @param op a binary operator that must be associative. - * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. - */ - def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) - - /** Builds a new array by applying a function to all elements of this array. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. 
- * @return a new array resulting from applying the given function - * `f` to each element of this array and collecting the results. - */ - def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { - val len = xs.length - val ys = new Array[B](len) - if(len > 0) { - var i = 0 - (xs: Any @unchecked) match { - case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } - } - } - ys - } - - def mapInPlace(f: A => A): Array[A] = { - var i = 0 - while (i < xs.length) { - xs.update(i, f(xs(i))) - i = i + 1 - } - xs - } - - /** Builds a new array by applying a function to all elements of this array - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given collection-valued function - * `f` to each element of this array and concatenating the results. 
- */ - def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - var i = 0 - while(i < xs.length) { - b ++= f(xs(i)) - i += 1 - } - b.result() - } - - def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = - flatMap[B](x => asIterable(f(x))) - - /** Flattens a two-dimensional array by concatenating all its rows - * into a single array. - * - * @tparam B Type of row elements. - * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. - * @return An array obtained by concatenating rows of this array. - */ - def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val len = xs.length - var size = 0 - var i = 0 - while(i < len) { - xs(i) match { - case it: IterableOnce[_] => - val k = it.knownSize - if(k > 0) size += k - case a: Array[_] => size += a.length - case _ => - } - i += 1 - } - if(size > 0) b.sizeHint(size) - i = 0 - while(i < len) { - b ++= asIterable(xs(i)) - i += 1 - } - b.result() - } - - /** Builds a new array by applying a partial function to all elements of this array - * on which the function is defined. - * - * @param pf the partial function which filters and maps the array. - * @tparam B the element type of the returned array. - * @return a new array resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. 
- */ - def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { - val fallback: Any => Any = ArrayOps.fallback - val b = ArrayBuilder.make[B] - var i = 0 - while (i < xs.length) { - val v = pf.applyOrElse(xs(i), fallback) - if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) - i += 1 - } - b.result() - } - - /** Finds the first element of the array for which the given partial function is defined, and applies the - * partial function to it. */ - def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { - val fallback: Any => Any = ArrayOps.fallback - var i = 0 - while (i < xs.length) { - val v = pf.applyOrElse(xs(i), fallback) - if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) - i += 1 - } - None - } - - /** Returns an array formed from this array and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new array containing pairs consisting of corresponding elements of this array and `that`. - * The length of the returned array is the minimum of the lengths of this array and `that`. - */ - def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = { - val b = new ArrayBuilder.ofRef[(A, B)]() - val k = that.knownSize - b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) - var i = 0 - val it = that.iterator - while(i < xs.length && it.hasNext) { - b += ((xs(i), it.next())) - i += 1 - } - b.result() - } - - /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is - * invoked on the returned `LazyZip2` decorator. 
- * - * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of - * constructing and deconstructing intermediary tuples. - * - * {{{ - * val xs = List(1, 2, 3) - * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) - * // res == List(4, 8, 12) - * }}} - * - * @param that the iterable providing the second element of each eventual pair - * @tparam B the type of the second element in each eventual pair - * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs - * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. - */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) - - /** Returns an array formed from this array and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is shorter than the other, - * placeholder elements are used to extend the shorter collection to the length of the longer. - * - * @param that the iterable providing the second half of each result pair - * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. - * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. - * @return a new array containing pairs consisting of corresponding elements of this array and `that`. - * The length of the returned array is the maximum of the lengths of this array and `that`. - * If this array is shorter than `that`, `thisElem` values are used to pad the result. - * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
- */ - def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { - val b = new ArrayBuilder.ofRef[(A1, B)]() - val k = that.knownSize - b.sizeHint(max(k, xs.length)) - var i = 0 - val it = that.iterator - while(i < xs.length && it.hasNext) { - b += ((xs(i), it.next())) - i += 1 - } - while(it.hasNext) { - b += ((thisElem, it.next())) - i += 1 - } - while(i < xs.length) { - b += ((xs(i), thatElem)) - i += 1 - } - b.result() - } - - /** Zips this array with its indices. - * - * @return A new array containing pairs consisting of all elements of this array paired with their index. - * Indices start at `0`. - */ - def zipWithIndex: Array[(A, Int)] = { - val b = new Array[(A, Int)](xs.length) - var i = 0 - while(i < xs.length) { - b(i) = ((xs(i), i)) - i += 1 - } - b - } - - /** A copy of this array with an element appended. */ - def appended[sealed B >: A : ClassTag](x: B): Array[B] = { - val dest = Array.copyAs[B](xs, xs.length+1) - dest(xs.length) = x - dest - } - - @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x) - - /** A copy of this array with an element prepended. */ - def prepended[sealed B >: A : ClassTag](x: B): Array[B] = { - val dest = new Array[B](xs.length + 1) - dest(0) = x - Array.copy(xs, 0, dest, 1, xs.length) - dest - } - - @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x) - - /** A copy of this array with all elements of a collection prepended. */ - def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val k = prefix.knownSize - if(k >= 0) b.sizeHint(k + xs.length) - b.addAll(prefix) - if(k < 0) b.sizeHint(b.length + xs.length) - b.addAll(xs) - b.result() - } - - /** A copy of this array with all elements of an array prepended. 
*/ - def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { - val dest = Array.copyAs[B](prefix, prefix.length+xs.length) - Array.copy(xs, 0, dest, prefix.length, xs.length) - dest - } - - @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) - - @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) - - /** A copy of this array with all elements of a collection appended. */ - def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { - val b = ArrayBuilder.make[B] - val k = suffix.knownSize - if(k >= 0) b.sizeHint(k + xs.length) - b.addAll(xs) - b.addAll(suffix) - b.result() - } - - /** A copy of this array with all elements of an array appended. */ - def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { - val dest = Array.copyAs[B](xs, xs.length+suffix.length) - Array.copy(suffix, 0, dest, xs.length, suffix.length) - dest - } - - @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - - @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - - @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - - @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - - @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) - - @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) - - /** Tests whether this array contains a given value as an element. - * - * @param elem the element to test. - * @return `true` if this array has an element that is equal (as - * determined by `==`) to `elem`, `false` otherwise. 
- */ - def contains(elem: A): Boolean = exists (_ == elem) - - /** Returns a copy of this array with patched values. - * Patching at negative indices is the same as patching starting at 0. - * Patching at indices at or larger than the length of the original array appends the patch to the end. - * If more values are replaced than actually exist, the excess is ignored. - * - * @param from The start index from which to patch - * @param other The patch values - * @param replaced The number of values in the original array that are replaced by the patch. - */ - def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { - val b = ArrayBuilder.make[B] - val k = other.knownSize - val r = if(replaced < 0) 0 else replaced - if(k >= 0) b.sizeHint(xs.length + k - r) - val chunk1 = if(from > 0) min(from, xs.length) else 0 - if(chunk1 > 0) b.addAll(xs, 0, chunk1) - b ++= other - val remaining = xs.length - chunk1 - r - if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) - b.result() - } - - /** Converts an array of pairs into an array of first elements and an array of second elements. - * - * @tparam A1 the type of the first half of the element pairs - * @tparam A2 the type of the second half of the element pairs - * @param asPair an implicit conversion which asserts that the element type - * of this Array is a pair. - * @param ct1 a class tag for `A1` type parameter that is required to create an instance - * of `Array[A1]` - * @param ct2 a class tag for `A2` type parameter that is required to create an instance - * of `Array[A2]` - * @return a pair of Arrays, containing, respectively, the first and second half - * of each element pair of this Array. 
- */ - def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { - val a1 = new Array[A1](xs.length) - val a2 = new Array[A2](xs.length) - var i = 0 - while (i < xs.length) { - val e = asPair(xs(i)) - a1(i) = e._1 - a2(i) = e._2 - i += 1 - } - (a1, a2) - } - - /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. - * - * @tparam A1 the type of the first of three elements in the triple - * @tparam A2 the type of the second of three elements in the triple - * @tparam A3 the type of the third of three elements in the triple - * @param asTriple an implicit conversion which asserts that the element type - * of this Array is a triple. - * @param ct1 a class tag for T1 type parameter that is required to create an instance - * of Array[T1] - * @param ct2 a class tag for T2 type parameter that is required to create an instance - * of Array[T2] - * @param ct3 a class tag for T3 type parameter that is required to create an instance - * of Array[T3] - * @return a triple of Arrays, containing, respectively, the first, second, and third - * elements from each element triple of this Array. - */ - def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], - ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { - val a1 = new Array[A1](xs.length) - val a2 = new Array[A2](xs.length) - val a3 = new Array[A3](xs.length) - var i = 0 - while (i < xs.length) { - val e = asTriple(xs(i)) - a1(i) = e._1 - a2(i) = e._2 - a3(i) = e._3 - i += 1 - } - (a1, a2, a3) - } - - /** Transposes a two dimensional array. - * - * @tparam B Type of row elements. - * @param asArray A function that converts elements of this array to rows - arrays of type `B`. - * @return An array obtained by replacing elements of this arrays with rows the represent. 
- */ - def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = { - val aClass = xs.getClass.getComponentType - val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) - if (xs.length == 0) bb.result() - else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) - val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) - for (xs <- this) { - var i = 0 - for (x <- new ArrayOps(asArray(xs))) { - bs(i) += x - i += 1 - } - } - for (b <- new ArrayOps(bs)) bb += b.result() - bb.result() - } - } - - /** Apply `f` to each element for its side effects. - * Note: [U] parameter needed to help scalac's type inference. - */ - def foreach[U](f: A => U): Unit = { - val len = xs.length - var i = 0 - (xs: Any @unchecked) match { - case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } - } - } - - /** Selects all the elements of this array ignoring the duplicates. - * - * @return a new array consisting of all the elements of this array without duplicates. - */ - def distinct: Array[A] = distinctBy(identity) - - /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying - * the transforming function `f`. 
- * - * @param f The transforming function whose result is used to determine the uniqueness of each element - * @tparam B the type of the elements after being transformed by `f` - * @return a new array consisting of all the elements of this array without duplicates. - */ - def distinctBy[B](f: A -> B): Array[A] = - ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() - - /** A copy of this array with an element value appended until a given target length is reached. - * - * @param len the target length - * @param elem the padding value - * @tparam B the element type of the returned array. - * @return a new array consisting of - * all elements of this array followed by the minimal number of occurrences of `elem` so - * that the resulting collection has a length of at least `len`. - */ - def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = { - var i = xs.length - val newlen = max(i, len) - val dest = Array.copyAs[B](xs, newlen) - while(i < newlen) { - dest(i) = elem - i += 1 - } - dest - } - - /** Produces the range of all indices of this sequence. - * - * @return a `Range` value from `0` to one less than the length of this array. - */ - def indices: Range = Range(0, xs.length) - - /** Partitions this array into a map of arrays according to some discriminator function. - * - * @param f the discriminator function. - * @tparam K the type of keys returned by the discriminator function. - * @return A map from keys to arrays such that the following invariant holds: - * {{{ - * (xs groupBy f)(k) = xs filter (x => f(x) == k) - * }}} - * That is, every key `k` is bound to an array of those elements `x` - * for which `f(x)` equals `k`. 
- */ - def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { - val m = mutable.Map.empty[K, ArrayBuilder[A]] - val len = xs.length - var i = 0 - while(i < len) { - val elem = xs(i) - val key = f(elem) - val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) - bldr += elem - i += 1 - } - m.view.mapValues(_.result()).toMap - } - - /** - * Partitions this array into a map of arrays according to a discriminator function `key`. - * Each element in a group is transformed into a value of type `B` using the `value` function. - * - * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. - * - * {{{ - * case class User(name: String, age: Int) - * - * def namesByAge(users: Array[User]): Map[Int, Array[String]] = - * users.groupMap(_.age)(_.name) - * }}} - * - * @param key the discriminator function - * @param f the element transformation function - * @tparam K the type of keys returned by the discriminator function - * @tparam B the type of values returned by the transformation function - */ - def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { - val m = mutable.Map.empty[K, ArrayBuilder[B]] - val len = xs.length - var i = 0 - while(i < len) { - val elem = xs(i) - val k = key(elem) - val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) - bldr += f(elem) - i += 1 - } - m.view.mapValues(_.result()).toMap - } - - @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq - - def toIndexedSeq: immutable.IndexedSeq[A] = - immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index 0. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached. - * - * @param xs the array to fill. - * @tparam B the type of the elements of the array. 
- */ - def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index `start`. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached. - * - * @param xs the array to fill. - * @param start the starting index within the destination array. - * @tparam B the type of the elements of the array. - */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) - - /** Copy elements of this array to another array. - * Fills the given array `xs` starting at index `start` with at most `len` values. - * Copying will stop once either all the elements of this array have been copied, - * or the end of the array is reached, or `len` elements have been copied. - * - * @param xs the array to fill. - * @param start the starting index within the destination array. - * @param len the maximal number of elements to copy. - * @tparam B the type of the elements of the array. - */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) - if (copied > 0) { - Array.copy(this.xs, 0, xs, start, copied) - } - copied - } - - /** Create a copy of this array with the specified element type. */ - def toArray[sealed B >: A: ClassTag]: Array[B] = { - val destination = new Array[B](xs.length) - copyToArray(destination, 0) - destination - } - - /** Counts the number of elements in this array which satisfy a predicate */ - def count(p: A => Boolean): Int = { - var i, res = 0 - val len = xs.length - while(i < len) { - if(p(xs(i))) res += 1 - i += 1 - } - res - } - - // can't use a default arg because we already have another overload with a default arg - /** Tests whether this array starts with the given array. 
*/ - @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0) - - /** Tests whether this array contains the given array at a given index. - * - * @param that the array to test - * @param offset the index where the array is searched. - * @return `true` if the array `that` is contained in this array at - * index `offset`, otherwise `false`. - */ - def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = { - val safeOffset = offset.max(0) - val thatl = that.length - if(thatl > xs.length-safeOffset) thatl == 0 - else { - var i = 0 - while(i < thatl) { - if(xs(i+safeOffset) != that(i)) return false - i += 1 - } - true - } - } - - /** Tests whether this array ends with the given array. - * - * @param that the array to test - * @return `true` if this array has `that` as a suffix, `false` otherwise. - */ - def endsWith[sealed B >: A](that: Array[B]): Boolean = { - val thatl = that.length - val off = xs.length - thatl - if(off < 0) false - else { - var i = 0 - while(i < thatl) { - if(xs(i+off) != that(i)) return false - i += 1 - } - true - } - } - - /** A copy of this array with one single replaced element. - * @param index the position of the replacement - * @param elem the replacing element - * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. - * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. 
- */ - def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = { - if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") - val dest = toArray[B] - dest(index) = elem - dest - } - - @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) - - - /* ************************************************************************************************************ - The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which - may not provide the best possible performance. We need them in `ArrayOps` because their return type - mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). - ************************************************************************************************************ */ - - - /** Computes the multiset difference between this array and another sequence. - * - * @param that the sequence of elements to remove - * @return a new array which contains all elements of this array - * except some of occurrences of elements that also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form - * part of the result, but any following occurrences will. - */ - def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] - - /** Computes the multiset intersection between this array and another sequence. - * - * @param that the sequence of elements to intersect with. - * @return a new array which contains all elements of this array - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. 
- */ - def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] - - /** Groups elements in fixed size blocks by passing a "sliding window" - * over them (as opposed to partitioning them, as is done in grouped.) - * @see [[scala.collection.Iterator]], method `sliding` - * - * @param size the number of elements per group - * @param step the distance between the first elements of successive groups - * @return An iterator producing arrays of size `size`, except the - * last element (which may be the only element) will be truncated - * if there are fewer than `size` elements remaining to be grouped. - */ - def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) - - /** Iterates over combinations of elements. - * - * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. - * - * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. - * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. - * - * If there is more than one way to generate the same combination, only one will be returned. - * - * For example, the result `"xy"` arbitrarily selected one of the `x` elements. - * - * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` - * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. - * - * It is not specified which of these equal combinations is returned. It is an implementation detail - * that should not be relied on. For example, the combination `"xx"` does not necessarily contain - * the first `x` in this sequence. This behavior is observable if the elements compare equal - * but are not identical. 
- * - * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order - * of the original sequence, but the order in which elements were selected, by "first index"; - * the order of each `x` element is also arbitrary. - * - * @return An Iterator which traverses the n-element combinations of this array - * @example {{{ - * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(a, b) - * // Array(a, c) - * // Array(b, b) - * // Array(b, c) - * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(b, b) - * // Array(b, a) - * }}} - */ - def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) - - /** Iterates over distinct permutations of elements. - * - * @return An Iterator which traverses the distinct permutations of this array. - * @example {{{ - * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) - * // Array(a, b, b) - * // Array(b, a, b) - * // Array(b, b, a) - * }}} - */ - def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) - - // we have another overload here, so we need to duplicate this method - /** Tests whether this array contains the given sequence at a given index. - * - * @param that the sequence to test - * @param offset the index where the sequence is searched. - * @return `true` if the sequence `that` is contained in this array at - * index `offset`, otherwise `false`. - */ - def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) - - // we have another overload here, so we need to duplicate this method - /** Tests whether this array ends with the given sequence. - * - * @param that the sequence to test - * @return `true` if this array has `that` as a suffix, `false` otherwise. 
- */ - def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) -} diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala deleted file mode 100644 index 39c15dbe808f..000000000000 --- a/tests/pos-special/stdlib/collection/BitSet.scala +++ /dev/null @@ -1,348 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.annotation.nowarn -import scala.collection.Stepper.EfficientSplit -import scala.collection.mutable.Builder -import language.experimental.captureChecking - -/** Base type of bitsets. - * - * This trait provides most of the operations of a `BitSet` independently of its representation. - * It is inherited by all concrete implementations of bitsets. - * - * @define bitsetinfo - * Bitsets are sets of non-negative integers which are represented as - * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is - * determined by the largest number stored in it. 
- * @define coll bitset - * @define Coll `BitSet` - */ -trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "BitSet" - override def unsorted: Set[Int] = this -} - -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." - private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." - - def empty: BitSet = immutable.BitSet.empty - def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder - def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it) - - @SerialVersionUID(3L) - private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { - - @transient protected var elems: Array[Long] = _ - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - val nwords = coll.nwords - out.writeInt(nwords) - var i = 0 - while(i < nwords) { - out.writeLong(coll.word(i)) - i += 1 - } - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val nwords = in.readInt() - elems = new Array[Long](nwords) - var i = 0 - while(i < nwords) { - elems(i) = in.readLong() - i += 1 - } - } - - protected[this] def readResolve(): Any - } -} - -/** Base implementation type of bitsets */ -trait BitSetOps[+C <: BitSet with BitSetOps[C]] - extends 
SortedSetOps[Int, SortedSet, C] { self => - import BitSetOps._ - - def bitSetFactory: SpecificIterableFactory[Int, C] - - def unsorted: Set[Int] - - final def ordering: Ordering[Int] = Ordering.Int - - /** The number of words (each with 64 bits) making up the set */ - protected[collection] def nwords: Int - - /** The words at index `idx`, or 0L if outside the range of the set - * '''Note:''' requires `idx >= 0` - */ - protected[collection] def word(idx: Int): Long - - /** Creates a new set of this kind from an array of longs - */ - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C - - def contains(elem: Int): Boolean = - 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L - - def iterator: Iterator[Int] = iteratorFrom(0) - - def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] { - private[this] var currentPos = if (start > 0) start >> LogWL else 0 - private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0) - final override def hasNext: Boolean = { - while (currentWord == 0) { - if (currentPos + 1 >= nwords) return false - currentPos += 1 - currentWord = word(currentPos) - } - true - } - final override def next(): Int = { - if (hasNext) { - val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord) - currentWord &= currentWord - 1 - (currentPos << LogWL) + bitPos - } else Iterator.empty.next() - } - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { - val st = scala.collection.convert.impl.BitSetStepper.from(this) - val r = - if (shape.shape == StepperShape.IntShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - override def size: Int = { - var s = 0 - var i = nwords - while (i > 0) { - i -= 1 - s += java.lang.Long.bitCount(word(i)) - } - s - } - - override def isEmpty: 
Boolean = 0 until nwords forall (i => word(i) == 0) - - @inline private[this] def smallestInt: Int = { - val thisnwords = nwords - var i = 0 - while(i < thisnwords) { - val currentWord = word(i) - if (currentWord != 0L) { - return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) - } - i += 1 - } - throw new UnsupportedOperationException("empty.smallestInt") - } - - @inline private[this] def largestInt: Int = { - var i = nwords - 1 - while(i >= 0) { - val currentWord = word(i) - if (currentWord != 0L) { - return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 - } - i -= 1 - } - throw new UnsupportedOperationException("empty.largestInt") - } - - override def max[B >: Int](implicit ord: Ordering[B]): Int = - if (Ordering.Int eq ord) largestInt - else if (Ordering.Int isReverseOf ord) smallestInt - else super.max(ord) - - - override def min[B >: Int](implicit ord: Ordering[B]): Int = - if (Ordering.Int eq ord) smallestInt - else if (Ordering.Int isReverseOf ord) largestInt - else super.min(ord) - - override def foreach[U](f: Int => U): Unit = { - /* NOTE: while loops are significantly faster as of 2.11 and - one major use case of bitsets is performance. Also, there - is nothing to do when all bits are clear, so use that as - the inner loop condition. 
*/ - var i = 0 - while (i < nwords) { - var w = word(i) - var j = i * WordLength - while (w != 0L) { - if ((w&1L) == 1L) f(j) - w = w >>> 1 - j += 1 - } - i += 1 - } - } - - /** Creates a bit mask for this set as a new array of longs - */ - def toBitMask: Array[Long] = { - val a = new Array[Long](nwords) - var i = a.length - while(i > 0) { - i -= 1 - a(i) = word(i) - } - a - } - - def rangeImpl(from: Option[Int], until: Option[Int]): C = { - val a = coll.toBitMask - val len = a.length - if (from.isDefined) { - val f = from.get - val w = f >> LogWL - val b = f & (WordLength - 1) - if (w >= 0) { - java.util.Arrays.fill(a, 0, math.min(w, len), 0) - if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) - } - } - if (until.isDefined) { - val u = until.get - val w = u >> LogWL - val b = u & (WordLength - 1) - if (w < len) { - java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) - if (w >= 0) a(w) &= (1L << b) - 1 - } - } - coll.fromBitMaskNoCopy(a) - } - - override def concat(other: collection.IterableOnce[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords max otherBitset.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) | otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.concat(other) - } - - override def intersect(other: Set[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords min otherBitset.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.intersect(other) - } - - abstract override def diff(other: Set[Int]): C = other match { - case otherBitset: BitSet => - val len = coll.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = this.word(idx) & ~otherBitset.word(idx) - fromBitMaskNoCopy(words) - case _ => super.diff(other) - } - - /** Computes the symmetric difference of this bitset and another bitset by 
performing - * a bitwise "exclusive-or". - * - * @param other the other bitset to take part in the symmetric difference. - * @return a bitset containing those bits of this - * bitset or the other bitset that are not contained in both bitsets. - */ - def xor(other: BitSet): C = { - val len = coll.nwords max other.nwords - val words = new Array[Long](len) - for (idx <- 0 until len) - words(idx) = coll.word(idx) ^ other.word(idx) - coll.fromBitMaskNoCopy(words) - } - - @`inline` final def ^ (other: BitSet): C = xor(other) - - /** - * Builds a new bitset by applying a function to all elements of this bitset - * @param f the function to apply to each element. - * @return a new bitset resulting from applying the given function ''f'' to - * each element of this bitset and collecting the results - */ - def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) - - def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) - - def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) - - override def partition(p: Int => Boolean): (C, C) = { - val left = filter(p) - (left, this &~ left) - } -} - -object BitSetOps { - - /* Final vals can sometimes be inlined as constants (faster) */ - private[collection] final val LogWL = 6 - private[collection] final val WordLength = 64 - private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 - - private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { - var len = elems.length - while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 - var newlen = len - if (idx >= newlen && w != 0L) newlen = idx + 1 - val newelems = new Array[Long](newlen) - Array.copy(elems, 0, newelems, 0, len) - if (idx < newlen) newelems(idx) = w - else assert(w == 0L) - newelems - } - - private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = - if (oldWord == 0L) 
0L else { - var w = oldWord - val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) - var jmask = 1L << trailingZeroes - var j = wordIndex * BitSetOps.WordLength + trailingZeroes - val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) - while (j != maxJ) { - if ((w & jmask) != 0L) { - if (pred(j) == isFlipped) { - // j did not pass the filter here - w = w & ~jmask - } - } - jmask = jmask << 1 - j += 1 - } - w - } -} diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala deleted file mode 100644 index cca40dd31d40..000000000000 --- a/tests/pos-special/stdlib/collection/BufferedIterator.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** Buffered iterators are iterators which provide a method `head` - * that inspects the next element without discarding it. - */ -trait BufferedIterator[+A] extends Iterator[A] { - - /** Returns next element of iterator without advancing beyond it. - */ - def head: A - - /** Returns an option of the next element of an iterator without advancing beyond it. 
- * @return the next element of this iterator if it has a next element - * `None` if it does not - */ - def headOption : Option[A] = if (hasNext) Some(head) else None - - override def buffered: this.type = this -} diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala deleted file mode 100644 index 0a3cc199d4dc..000000000000 --- a/tests/pos-special/stdlib/collection/BuildFrom.scala +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.implicitNotFound -import scala.collection.mutable.Builder -import scala.collection.immutable.WrappedString -import scala.reflect.ClassTag -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. - * Implicit instances of `BuildFrom` are available for all collection types. - * - * @tparam From Type of source collection - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - */ -@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") -trait BuildFrom[-From, -A, +C] extends Any { self => - def fromSpecific(from: From)(it: IterableOnce[A]^): C - // !!! this is wrong, we need two versions of fromSpecific; one mapping - // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. - // But that requires a large scale refactoring of BuildFrom. 
The unsafeAssumePure - // calls in this file are needed to sweep that problem under the carpet. - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ - def newBuilder(from: From): Builder[A, C] - - @deprecated("Use newBuilder() instead of apply()", "2.13.0") - @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) - - /** Partially apply a BuildFrom to a Factory */ - def toFactory(from: From): Factory[A, C] = new Factory[A, C] { - def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) - def newBuilder: Builder[A, C] = self.newBuilder(from) - } -} - -object BuildFrom extends BuildFromLowPriority1 { - - /** Build the source collection type from a MapOps */ - implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { - //TODO: Reuse a prototype instance - def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) - } - - /** Build the source collection type from a SortedMapOps */ - implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { - def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) - } - - implicit def buildFromBitSet[C <: BitSet 
with BitSetOps[C]]: BuildFrom[C, Int, C] = - new BuildFrom[C, Int, C] { - def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) - def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder - } - - implicit val buildFromString: BuildFrom[String, Char, String] = - new BuildFrom[String, Char, String] { - def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it) - def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder - } - - implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = - new BuildFrom[WrappedString, Char, WrappedString] { - def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it) - def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder - } - - implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = - new BuildFrom[Array[_], A, Array[A]] { - def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it) - def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder - } - - implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] = - new BuildFrom[View[A], B, View[B]] { - def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure - def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder - } - -} - -trait BuildFromLowPriority1 extends BuildFromLowPriority2 { - - /** Build the source collection type from an Iterable with SortedOps */ - // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the - // implicit search space for faster compilation and reduced change of divergence. 
See the compilation - // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 - implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { - def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) - } - - implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = - new BuildFrom[String, A, immutable.IndexedSeq[A]] { - def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) - def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] - } -} - -trait BuildFromLowPriority2 { - /** Build the source collection type from an IterableOps */ - implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { - //TODO: Reuse a prototype instance - def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure - } - - implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { - def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder - def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure - } -} diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala deleted file mode 100644 
index baa9eceadae5..000000000000 --- a/tests/pos-special/stdlib/collection/DefaultMap.scala +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -import language.experimental.captureChecking - -/** A default map which builds a default `immutable.Map` implementation for all - * transformations. - */ -@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") -trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala deleted file mode 100644 index c45776b62b9c..000000000000 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ /dev/null @@ -1,798 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.collection.immutable.NumericRange -import scala.language.implicitConversions -import scala.collection.mutable.Builder -import scala.annotation.unchecked.uncheckedVariance -import scala.reflect.ClassTag -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -/** - * A factory that builds a collection of type `C` with elements of type `A`. - * - * This is a general form of any factory ([[IterableFactory]], - * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose - * element type is fixed. - * - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. 
`List[Int]`, `TreeMap[Int, String]`, etc.) - */ -trait Factory[-A, +C] extends Pure { - - /** - * @return A collection of type `C` containing the same elements - * as the source collection `it`. - * @param it Source collection - */ - def fromSpecific(it: IterableOnce[A]^): C - - /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. - * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ - def newBuilder: Builder[A, C] -} - -object Factory { - - implicit val stringFactory: Factory[Char, String] = new StringFactory - @SerialVersionUID(3L) - private class StringFactory extends Factory[Char, String] with Serializable { - def fromSpecific(it: IterableOnce[Char]^): String = { - val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) - b ++= it - b.result() - } - def newBuilder: Builder[Char, String] = new mutable.StringBuilder() - } - - implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] - @SerialVersionUID(3L) - private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): Array[A] = { - val b = newBuilder - b.sizeHint(scala.math.max(0, it.knownSize)) - b ++= it - b.result() - } - def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] - } - -} - -/** Base trait for companion objects of unconstrained collection types that may require - * multiple traversals of a source collection to build a target collection `CC`. - * - * @tparam CC Collection type constructor (e.g. `List`) - * @define factoryInfo - * This object provides a set of operations to create $Coll values. 
- * - * @define coll collection - * @define Coll `Iterable` - */ -trait IterableFactory[+CC[_]] extends Serializable, Pure { - - /** Creates a target $coll from an existing source collection - * - * @param source Source collection - * @tparam A the type of the collection’s elements - * @return a new $coll with the elements of `source` - */ - def from[A](source: IterableOnce[A]^): CC[A]^{source} - - /** An empty collection - * @tparam A the type of the ${coll}'s elements - */ - def empty[A]: CC[A] - - /** Creates a $coll with the specified elements. - * @tparam A the type of the ${coll}'s elements - * @param elems the elements of the created $coll - * @return a new $coll with elements `elems` - */ - def apply[A](elems: A*): CC[A] = from(elems) - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f)) - - /** Produces a $coll that uses a function `f` to produce elements of type `A` - * and update an internal state of type `S`. - * - * @param init State initial value - * @param f Computes the next element (or returns `None` to signal - * the end of the collection) - * @tparam A Type of the elements - * @tparam S Type of the internal state - * @return a $coll that produces elements using `f` until `f` returns `None` - */ - def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f)) - - /** Produces a $coll containing a sequence of increasing of integers. 
- * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) - - /** - * @return A builder for $Coll objects. - * @tparam A the type of the ${coll}’s elements - */ - def newBuilder[A]: Builder[A, CC[A]] - - /** Produces a $coll containing the results of some element computation a number of times. - * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? // fill(n1)(fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. - */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc - ??? 
// fill(n1)(fill(n2, n3, n4, n5)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
- * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc - ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) - - /** Concatenates all argument collections into a single $coll. - * - * @param xss the collections that are to be concatenated. - * @return the concatenation of all the collections. 
- */ - def concat[A](xss: Iterable[A]*): CC[A] = { - from(xss.foldLeft(View.empty[A])(_ ++ _)) - } - - implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) -} - -object IterableFactory { - - /** - * Fixes the element type of `factory` to `A` - * @param factory The factory to fix the element type - * @tparam A Type of elements - * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`) - * @return A [[Factory]] that uses the given `factory` to build a collection of elements - * of type `A` - */ - implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure - // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific - def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] - } - - implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = - new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = - factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary - def newBuilder(from: Any) = factory.newBuilder - } - - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { - override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) - def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] - } -} - -// !!! 
Needed to add this separate trait -trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: - def from[A](source: IterableOnce[A]^): CC[A] - override def apply[A](elems: A*): CC[A] = from(elems) - -/** - * @tparam CC Collection type constructor (e.g. `List`) - */ -trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { - import SeqFactory.UnapplySeqWrapper - final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? -} - -object SeqFactory { - @SerialVersionUID(3L) - class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { - override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) - def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] - } - - final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { - def isEmpty: false = false - def get: UnapplySeqWrapper[A] = this - def lengthCompare(len: Int): Int = c.lengthCompare(len) - def apply(i: Int): A = c(i) - def drop(n: Int): scala.Seq[A] = c match { - case seq: scala.Seq[A] => seq.drop(n) - case _ => c.view.drop(n).toSeq - } - def toSeq: scala.Seq[A] = c.toSeq - } -} - -trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { - - override def fill[A](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - override def tabulate[A](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - - override def concat[A](xss: Iterable[A]*): CC[A] = { - val b = newBuilder[A] - val knownSizes = xss.view.map(_.knownSize) - if (knownSizes forall (_ >= 0)) { - b.sizeHint(knownSizes.sum) - } - for (xs <- xss) b ++= xs - 
b.result() - } - -} - -/** - * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) - * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { - this: SpecificIterableFactory[A, C] => - - def empty: C - def apply(xs: A*): C = fromSpecific(xs) - def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) - def newBuilder: Builder[A, C] - - implicit def specificIterableFactory: Factory[A, C] = this -} - -/** - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait MapFactory[+CC[_, _]] extends Serializable, Pure { - - /** - * An empty Map - */ - def empty[K, V]: CC[K, V] - - /** - * A collection of type Map generated from given iterable object. - */ - def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] - - /** - * A collection of type Map that contains given key/value bindings. - */ - def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) - - /** - * The default builder for Map objects. - */ - def newBuilder[K, V]: Builder[(K, V), CC[K, V]] - - /** - * The default Factory instance for maps. - */ - implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) -} - -object MapFactory { - - /** - * Fixes the key and value types of `factory` to `K` and `V`, respectively - * @param factory The factory to fix the key and value types - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
- * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` - * and values of type `V` - */ - implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) - def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] - } - - implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = - new BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) - def newBuilder(from: Any) = factory.newBuilder[K, V] - } - - @SerialVersionUID(3L) - class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { - override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) - def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) - def empty[K, V]: C[K, V] = delegate.empty - def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder - } -} - -/** Base trait for companion objects of collections that require an implicit evidence. - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - * @tparam Ev Unary type constructor for the implicit evidence required for an element type - * (typically `Ordering` or `ClassTag`) - * - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { - - def from[E : Ev](it: IterableOnce[E]^): CC[E] - - def empty[A : Ev]: CC[A] - - def apply[A : Ev](xs: A*): CC[A] = from(xs) - - /** Produces a $coll containing the results of some element computation a number of times. 
- * @param n the number of elements contained in the $coll. - * @param elem the element computation - * @return A $coll that contains the results of `n` evaluations of `elem`. - */ - def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) - - /** Produces a $coll containing values of a given function over a range of integer values starting from 0. - * @param n The number of elements in the $coll - * @param f The function computing element values - * @return A $coll consisting of elements `f(0), ..., f(n -1)` - */ - def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) - - /** Produces a $coll containing repeated applications of a function to a start value. - * - * @param start the start value of the $coll - * @param len the number of elements contained in the $coll - * @param f the function that's repeatedly applied - * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` - */ - def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) - - /** Produces a $coll that uses a function `f` to produce elements of type `A` - * and update an internal state of type `S`. - * - * @param init State initial value - * @param f Computes the next element (or returns `None` to signal - * the end of the collection) - * @tparam A Type of the elements - * @tparam S Type of the internal state - * @return a $coll that produces elements using `f` until `f` returns `None` - */ - def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) - - def newBuilder[A : Ev]: Builder[A, CC[A]] - - implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) -} - -object EvidenceIterableFactory { - - /** - * Fixes the element type of `factory` to `A` - * @param factory The factory to fix the element type - * @tparam A Type of elements - * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) - * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) - * @return A [[Factory]] that uses the given `factory` to build a collection of elements - * of type `A` - */ - implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it) - def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] - } - - implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) - private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it) - def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] - } - - @SerialVersionUID(3L) - class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { - override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) - def empty[A : Ev]: CC[A] = delegate.empty - def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it) - def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] - } -} - -/** Base trait for companion objects of collections that require an implicit `Ordering`. - * @tparam CC Collection type constructor (e.g. 
`SortedSet`) - */ -trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] - -object SortedIterableFactory { - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) - extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] -} - -/** Base trait for companion objects of collections that require an implicit `ClassTag`. - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - */ -trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { - - @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = - ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays - - /** Produces a $coll containing a sequence of increasing of integers. - * - * @param start the first element of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @return a $coll with values `start, start + 1, ..., end - 1` - */ - def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) - - /** Produces a $coll containing equally spaced values in some integer interval. - * @param start the start value of the $coll - * @param end the end value of the $coll (the first value NOT contained) - * @param step the difference between successive elements of the $coll (must be positive or negative) - * @return a $coll with values `start, start + step, ...` up to, but excluding `end` - */ - def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) - - /** Produces a two-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
- */ - def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) - - /** Produces a three-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) - - /** Produces a four-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. - */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4)(elem)) - - /** Produces a five-dimensional $coll containing the results of some element computation a number of times. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param elem the element computation - * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
- */ - def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4, n5)(elem)) - - /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2)` - * for `0 <= i1 < n1` and `0 <= i2 < n2`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) - - /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) - - /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
- */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) - - /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. - * @param n1 the number of elements in the 1st dimension - * @param n2 the number of elements in the 2nd dimension - * @param n3 the number of elements in the 3rd dimension - * @param n4 the number of elements in the 4th dimension - * @param n5 the number of elements in the 5th dimension - * @param f The function computing element values - * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` - * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. - */ - def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) -} - -object ClassTagIterableFactory { - @SerialVersionUID(3L) - class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) - extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] - - /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be - * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ - @SerialVersionUID(3L) - class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { - def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] - override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] - override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] - override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] - } -} - -/** - * @tparam CC Collection type constructor (e.g. `ArraySeq`) - */ -trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { - import SeqFactory.UnapplySeqWrapper - final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
-} - -object ClassTagSeqFactory { - @SerialVersionUID(3L) - class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) - extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] - - /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be - * sound depending on the use of the `ClassTag` by the collection implementation. */ - @SerialVersionUID(3L) - class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) - extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] -} - -trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { - - override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += elem - i += 1 - } - b.result() - } - - override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { - val b = newBuilder[A] - b.sizeHint(n) - var i = 0 - while (i < n) { - b += f(i) - i += 1 - } - b.result() - } - -} - -/** - * @define factoryInfo - * This object provides a set of operations to create $Coll values. - * - * @define coll collection - * @define Coll `Iterable` - */ -trait SortedMapFactory[+CC[_, _]] extends Serializable { - this: SortedMapFactory[CC] => - - def empty[K : Ordering, V]: CC[K, V] - - def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] - - def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) - - def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] - - implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) - -} - -object SortedMapFactory { - - /** - * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, - * respectively. 
- * - * @param factory The factory to fix the key and value types - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) - * @return A [[Factory]] that uses the given `factory` to build a map with keys of - * type `K` and values of type `V` - */ - implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) - - @SerialVersionUID(3L) - private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) - def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] - } - - implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) - private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) - def newBuilder(from: Any) = factory.newBuilder[K, V] - } - - @SerialVersionUID(3L) - class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { - override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) - def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) - def empty[K : Ordering, V]: CC[K, V] = delegate.empty - def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder - } -} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala deleted file mode 100644 index 772dcf5c65da..000000000000 --- a/tests/pos-special/stdlib/collection/Hashing.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright 
EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -import language.experimental.captureChecking - - -protected[collection] object Hashing { - - def elemHashCode(key: Any): Int = key.## - - def improve(hcode: Int): Int = { - var h: Int = hcode + ~(hcode << 9) - h = h ^ (h >>> 14) - h = h + (h << 4) - h ^ (h >>> 10) - } - - def computeHash(key: Any): Int = - improve(elemHashCode(key)) - - /** - * Utility method to keep a subset of all bits in a given bitmap - * - * Example - * bitmap (binary): 00000001000000010000000100000001 - * keep (binary): 1010 - * result (binary): 00000001000000000000000100000000 - * - * @param bitmap the bitmap - * @param keep a bitmask containing which bits to keep - * @return the original bitmap with all bits where keep is not 1 set to 0 - */ - def keepBits(bitmap: Int, keep: Int): Int = { - var result = 0 - var current = bitmap - var kept = keep - while (kept != 0) { - // lowest remaining bit in current - val lsb = current ^ (current & (current - 1)) - if ((kept & 1) != 0) { - // mark bit in result bitmap - result |= lsb - } - // clear lowest remaining one bit in abm - current &= ~lsb - // look at the next kept bit - kept >>>= 1 - } - result - } - -} diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index a2d4cc942231..6e8e2bd0dc66 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -18,8 +18,6 @@ import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - /** Base trait for indexed sequences that have 
efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] @@ -35,7 +33,7 @@ trait IndexedSeq[+A] extends Seq[A] object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) /** Base trait for indexed Seq operations */ -trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => def iterator: Iterator[A] = view.iterator @@ -88,7 +86,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) - override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala deleted file mode 100644 index a16e06fa707d..000000000000 --- a/tests/pos-special/stdlib/collection/IndexedSeqView.scala +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection - -import scala.annotation.nowarn -import language.experimental.captureChecking - -trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { - self: IndexedSeqViewOps[A, CC, C]^ => -} - -/** View defined in terms of indexing a range */ -trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { - self: IndexedSeqView[A]^ => - - override def view: IndexedSeqView[A]^{this} = this - - @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) - - override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) - override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) - - override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) - override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) - override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) - override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) - override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) - override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) - override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) - override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) - - def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new 
IndexedSeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "IndexedSeqView" -} - -object IndexedSeqView { - - @SerialVersionUID(3L) - private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { - this: IndexedSeqViewIterator[A]^ => - private[this] var current = 0 - private[this] var remainder = self.length - override def knownSize: Int = remainder - @inline private[this] def _hasNext: Boolean = remainder > 0 - def hasNext: Boolean = _hasNext - def next(): A = - if (_hasNext) { - val r = self(current) - current += 1 - remainder -= 1 - r - } else Iterator.empty.next() - - override def drop(n: Int): Iterator[A]^{this} = { - if (n > 0) { - current += n - remainder = Math.max(0, remainder - n) - } - this - } - - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { - - def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value - - val formatFrom = formatRange(from) - val formatUntil = formatRange(until) - remainder = Math.max(0, formatUntil - formatFrom) - current = current + formatFrom - this - } - } - @SerialVersionUID(3L) - private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { - this: IndexedSeqViewReverseIterator[A]^ => - private[this] var remainder = self.length - private[this] var pos = remainder - 1 - @inline private[this] def _hasNext: Boolean = remainder > 0 - def hasNext: Boolean = _hasNext - def next(): A = - if (_hasNext) { - val r = self(pos) - pos -= 1 - 
remainder -= 1 - r - } else Iterator.empty.next() - - // from < 0 means don't move pos, until < 0 means don't limit remainder - // - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { - if (_hasNext) { - if (remainder <= from) remainder = 0 // exhausted by big skip - else if (from <= 0) { // no skip, pos is same - if (until >= 0 && until < remainder) remainder = until // ...limited by until - } - else { - pos -= from // skip ahead - if (until >= 0 && until < remainder) { // ...limited by until - if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip - else remainder = until - from // ...limited by until, less the skip - } - else remainder -= from // ...otherwise just less the skip - } - } - this - } - } - - /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ - type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] - - @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]^) - extends SeqView.Id(underlying) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) - extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) - extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) - extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.Take(underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.Drop[A](underlying, n) with 
IndexedSeqView[A] - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) - extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] - - @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) - extends SeqView.Map(underlying, f) with IndexedSeqView[B] - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { - override def reverse: IndexedSeqView[A] = underlying match { - case x: IndexedSeqView[A] => x - case _ => super.reverse - } - } - - @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { - protected val lo = from max 0 - protected val hi = (until max 0) min underlying.length - protected val len = (hi - lo) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int): A = underlying(lo + i) - def length: Int = len - } -} - -/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. */ -@SerialVersionUID(3L) -abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index bca80d7be108..85c0debc6685 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -96,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A] * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. 
*/ - def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) } /** Base trait for Iterable operations @@ -400,7 +400,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable if (i != headSize) fail } - iterableFactory.from(bs.map(_.result())).asInstanceOf // !!! needed for cc + iterableFactory.from(bs.map(_.result())) } def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) @@ -902,10 +902,10 @@ object IterableOps { protected def filtered: Iterable[A]^{this} = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B]^{this, f} = + def map[B](f: A => B): CC[B]^{this} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = + def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala index a88be4943c58..6836a3bac39a 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -162,10 +162,10 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") - def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) @deprecated("Use .iterator.toArray", "2.13.0") - def toArray[sealed B >: A: ClassTag]: Array[B] = it match { + def toArray[B >: A: ClassTag]: Array[B] = it match { case it: Iterable[B] => it.toArray[B] case _ => 
it.iterator.toArray[B] } @@ -272,11 +272,10 @@ object IterableOnce { math.max(math.min(math.min(len, srcLen), destLen - start), 0) /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */ - @inline private[collection] def copyElemsToArray[A, sealed B >: A]( - elems: IterableOnce[A]^, - xs: Array[B], - start: Int = 0, - len: Int = Int.MaxValue): Int = + @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = elems match { case src: Iterable[A] => src.copyToArray[B](xs, start, len) case src => src.iterator.copyToArray[B](xs, start, len) @@ -890,7 +889,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -907,7 +906,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. 
* @@ -924,7 +923,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * * @note Reuse: $consumesIterator */ - def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { val it = iterator var i = start val end = start + math.min(len, xs.length - start) @@ -1313,13 +1312,13 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream) - @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) /** Convert collection to array. * * Implementation note: DO NOT call [[Array.from]] from this method. */ - def toArray[sealed B >: A: ClassTag]: Array[B] = + def toArray[B >: A: ClassTag]: Array[B] = if (knownSize >= 0) { val destination = new Array[B](knownSize) copyToArray(destination, 0) diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 90fd387069b0..ecd8d985bbf0 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -17,7 +17,7 @@ import scala.annotation.tailrec import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.runtime.Statics import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures +import caps.unsafe.unsafeAssumePure /** Iterators are data structures that allow to iterate over a sequence @@ -258,7 +258,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // segment must have data, and must be complete unless they allow partial val ok = index > 0 && (partial || index == size) - if (ok) buffer = builder.result().asInstanceOf[Array[B 
@uncheckedCaptures]] + if (ok) buffer = builder.result().asInstanceOf[Array[B]] else prev = null ok } @@ -416,9 +416,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } @deprecated("Call scanRight on an Iterable instead.", "2.13.0") - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = - ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator - // @uncheckedCaptures is safe since the ArrayBuffer is local temporrary storage + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) @@ -561,7 +559,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures] + private[this] val traversedValues = mutable.HashSet.empty[B] private[this] var nextElementDefined: Boolean = false private[this] var nextElement: A = _ @@ -704,7 +702,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ private[this] var status = 0 private def store(a: A): Unit = { - if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures] + if (lookahead == null) lookahead = new mutable.Queue[A] lookahead += a } def hasNext = { @@ -867,8 +865,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note Reuse: $consumesOneAndProducesTwoIterators */ def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { - val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures] - var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric + val gap = new scala.collection.mutable.Queue[A] + var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { override def knownSize: Int = 
self.synchronized { val thisSize = self.knownSize @@ -1145,7 +1143,9 @@ object Iterator extends IterableFactory[Iterator] { * Nested ConcatIterators are merged to avoid blowing the stack. */ private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { - private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from + private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure + // This should be Iteratpr[A]^, but fails since mutable variables can't capture cap. + // To do better we'd need to track nesting levels for universal capabiltities. private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var currentHasNextChecked = false diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala deleted file mode 100644 index 69130eae1829..000000000000 --- a/tests/pos-special/stdlib/collection/JavaConverters.scala +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.util.{concurrent => juc} -import java.{lang => jl, util => ju} - -import scala.collection.convert._ -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** A variety of decorators that enable converting between - * Scala and Java collections using extension methods, `asScala` and `asJava`. - * - * The extension methods return adapters for the corresponding API. 
- * - * The following conversions are supported via `asScala` and `asJava`: - *{{{ - * scala.collection.Iterable <=> java.lang.Iterable - * scala.collection.Iterator <=> java.util.Iterator - * scala.collection.mutable.Buffer <=> java.util.List - * scala.collection.mutable.Set <=> java.util.Set - * scala.collection.mutable.Map <=> java.util.Map - * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap - *}}} - * The following conversions are supported via `asScala` and through - * specially-named extension methods to convert to Java collections, as shown: - *{{{ - * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) - * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) - * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) - *}}} - * In addition, the following one-way conversions are provided via `asJava`: - *{{{ - * scala.collection.Seq => java.util.List - * scala.collection.mutable.Seq => java.util.List - * scala.collection.Set => java.util.Set - * scala.collection.Map => java.util.Map - *}}} - * The following one way conversion is provided via `asScala`: - *{{{ - * java.util.Properties => scala.collection.mutable.Map - *}}} - * In all cases, converting from a source type to a target type and back - * again will return the original source object. For example: - * {{{ - * import scala.collection.JavaConverters._ - * - * val source = new scala.collection.mutable.ListBuffer[Int] - * val target: java.util.List[Int] = source.asJava - * val other: scala.collection.mutable.Buffer[Int] = target.asScala - * assert(source eq other) - * }}} - * Alternatively, the conversion methods have descriptive names and can be invoked explicitly. 
- * {{{ - * scala> val vs = java.util.Arrays.asList("hi", "bye") - * vs: java.util.List[String] = [hi, bye] - * - * scala> val ss = asScalaIterator(vs.iterator) - * ss: Iterator[String] = - * - * scala> .toList - * res0: List[String] = List(hi, bye) - * - * scala> val ss = asScalaBuffer(vs) - * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye) - * }}} - */ -@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") -object JavaConverters extends AsJavaConverters with AsScalaConverters { - @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i) - - @deprecated("Use `asJava` instead", "2.13.0") - def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i) - - @deprecated("Use `asJava` instead", "2.13.0") - def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s) - - @deprecated("Use `asJava` instead", "2.13.0") - def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m) - - @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m) - - @deprecated("Use `asJava` instead", "2.13.0") - def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m) - - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) - - @deprecated("Use 
`asScala` instead", "2.13.0") - def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) - - @deprecated("Use `asScala` instead", "2.13.0") - def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) - - @deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) - - @deprecated("Use `asScala` instead", "2.13.0") - def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) - - @deprecated("Use `asScala` instead", "2.13.0") - def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) - - @deprecated("Use `asScala` instead", "2.13.0") - def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) - - // Deprecated implicit conversions for code that directly imports them - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. - * @see [[asJavaIterator]] - */ - implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = - new AsJava(asJavaIterator(i)) - - /** - * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. - * @see [[asJavaEnumeration]] - */ - implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = - new AsJavaEnumeration(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. - * @see [[asJavaIterable]] - */ - implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = - new AsJava(asJavaIterable(i)) - - /** - * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. 
- * @see [[asJavaCollection]] - */ - implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = - new AsJavaCollection(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. - * @see [[bufferAsJavaList]] - */ - implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = - new AsJava(bufferAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. - * @see [[mutableSeqAsJavaList]] - */ - implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = - new AsJava(mutableSeqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. - * @see [[seqAsJavaList]] - */ - implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = - new AsJava(seqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. - * @see [[mutableSetAsJavaSet]] - */ - implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = - new AsJava(mutableSetAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. - * @see [[setAsJavaSet]] - */ - implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = - new AsJava(setAsJavaSet(s)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. - * @see [[mutableMapAsJavaMap]] - */ - implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = - new AsJava(mutableMapAsJavaMap(m)) - - /** - * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. 
- * @see [[asJavaDictionary]] - */ - implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = - new AsJavaDictionary(m) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. - * @see [[mapAsJavaMap]] - */ - implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = - new AsJava(mapAsJavaMap(m)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. - * @see [[mapAsJavaConcurrentMap]]. - */ - implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = - new AsJava(mapAsJavaConcurrentMap(m)) - - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. - * @see [[asScalaIterator]] - */ - implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = - new AsScala(asScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. - * @see [[enumerationAsScalaIterator]] - */ - implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = - new AsScala(enumerationAsScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. - * @see [[iterableAsScalaIterable]] - */ - implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = - new AsScala(iterableAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. - * @see [[collectionAsScalaIterable]] - */ - implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = - new AsScala(collectionAsScalaIterable(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. 
- * @see [[asScalaBuffer]] - */ - implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = - new AsScala(asScalaBuffer(l)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. - * @see [[asScalaSet]] - */ - implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = - new AsScala(asScalaSet(s)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. - * @see [[mapAsScalaMap]] - */ - implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] = - new AsScala(mapAsScalaMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. - * @see [[mapAsScalaConcurrentMap]] - */ - implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] = - new AsScala(mapAsScalaConcurrentMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. - * @see [[dictionaryAsScalaMap]] - */ - implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] = - new AsScala(dictionaryAsScalaMap(p)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. 
- * @see [[propertiesAsScalaMap]] - */ - implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - new AsScala(propertiesAsScalaMap(p)) - - - /** Generic class containing the `asJava` converter method */ - class AsJava[A](op: => A) { - /** Converts a Scala collection to the corresponding Java collection */ - def asJava: A = op - } - - /** Generic class containing the `asScala` converter method */ - class AsScala[A](op: => A) { - /** Converts a Java collection to the corresponding Scala collection */ - def asScala: A = op - } - - /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { - /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) - } - - /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { - /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) - } - - /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { - /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) - } -} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala deleted file mode 100644 index 1bb4173d219f..000000000000 --- a/tests/pos-special/stdlib/collection/LazyZipOps.scala +++ /dev/null @@ -1,423 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** Decorator representing lazily zipped pairs. - * - * @define coll pair - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { - - /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are - * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. - * - * @param that the iterable providing the third element of each eventual triple - * @tparam B the type of the third element in each eventual triple - * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or - * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. - */ - def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that) - - def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - def hasNext = elems1.hasNext && elems2.hasNext - def next() = f(elems1.next(), elems2.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty - }) - } - - def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] var _current: Iterator[B] = Iterator.empty - private def current = { - while (!_current.hasNext && elems1.hasNext && elems2.hasNext) - 
_current = f(elems1.next(), elems2.next()).iterator - _current - } - def hasNext = current.hasNext - def next() = current.next() - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty - }) - } - - def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = { - bf.fromSpecific(src)(new AbstractView[(El1, El2)] { - def iterator = new AbstractIterator[(El1, El2)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] var _current: (El1, El2) = _ - private def current = { - while ((_current eq null) && elems1.hasNext && elems2.hasNext) { - val e1 = elems1.next() - val e2 = elems2.next() - if (p(e1, e2)) _current = (e1, e2) - } - _current - } - def hasNext = current ne null - def next() = { - val c = current - if (c ne null) { - _current = null - c - } else Iterator.empty.next() - } - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.hasNext - }) - } - - def exists(p: (El1, El2) => Boolean): Boolean = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - var res = false - - while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next()) - - res - } - - def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2)) - - def foreach[U](f: (El1, El2) => U): Unit = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - - while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next()) - } - - private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] { - def iterator = new AbstractIterator[(El1, El2)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - def hasNext = elems1.hasNext && elems2.hasNext - def next() = (elems1.next(), elems2.next()) - } - override 
def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty - } - - private def zipKnownSize: Int = { - val s1 = coll1.knownSize - if (s1 == 0) 0 else { - val s2 = coll2.knownSize - if (s2 == 0) 0 else s1 min s2 - } - } - - override def toString = s"$coll1.lazyZip($coll2)" -} - -object LazyZip2 { - implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable -} - - -/** Decorator representing lazily zipped triples. - * - * @define coll triple - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, - coll1: Iterable[El1]^, - coll2: Iterable[El2]^, - coll3: Iterable[El3]^) { - - /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are - * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. - * - * @param that the iterable providing the fourth element of each eventual 4-tuple - * @tparam B the type of the fourth element in each eventual 4-tuple - * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. - * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. 
- */ - def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) - - def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext - def next() = f(elems1.next(), elems2.next(), elems3.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty - }) - } - - def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] var _current: Iterator[B] = Iterator.empty - private def current = { - while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) - _current = f(elems1.next(), elems2.next(), elems3.next()).iterator - _current - } - def hasNext = current.hasNext - def next() = current.next() - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { - bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { - def iterator = new AbstractIterator[(El1, El2, El3)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] var _current: (El1, El2, El3) = _ - private def current = { - while ((_current eq null) && 
elems1.hasNext && elems2.hasNext && elems3.hasNext) { - val e1 = elems1.next() - val e2 = elems2.next() - val e3 = elems3.next() - if (p(e1, e2, e3)) _current = (e1, e2, e3) - } - _current - } - def hasNext = current ne null - def next() = { - val c = current - if (c ne null) { - _current = null - c - } else Iterator.empty.next() - } - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def exists(p: (El1, El2, El3) => Boolean): Boolean = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - var res = false - - while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext) - res = p(elems1.next(), elems2.next(), elems3.next()) - - res - } - - def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) - - def foreach[U](f: (El1, El2, El3) => U): Unit = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - - while (elems1.hasNext && elems2.hasNext && elems3.hasNext) - f(elems1.next(), elems2.next(), elems3.next()) - } - - private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { - def iterator = new AbstractIterator[(El1, El2, El3)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext - def next() = (elems1.next(), elems2.next(), elems3.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty - } - - private def zipKnownSize: Int = { - val s1 = coll1.knownSize - if (s1 == 0) 0 else { - val s2 = coll2.knownSize - if (s2 == 0) 0 else { - val s3 = coll3.knownSize - if (s3 == 0) 0 else s1 min s2 min s3 - } - } - } - - override def toString = 
s"$coll1.lazyZip($coll2).lazyZip($coll3)" -} - -object LazyZip3 { - implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable -} - - - -/** Decorator representing lazily zipped 4-tuples. - * - * @define coll tuple - * @define willNotTerminateInf - * - * Note: will not terminate for infinite-sized collections. - */ -final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, - coll1: Iterable[El1]^, - coll2: Iterable[El2]^, - coll3: Iterable[El3]^, - coll4: Iterable[El4]^) { - - def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext - def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty - }) - } - - def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { - bf.fromSpecific(src)(new AbstractView[B] { - def iterator = new AbstractIterator[B] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - private[this] var _current: Iterator[B] = Iterator.empty - private def current = { - while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator - _current - } - def hasNext = current.hasNext - def next() = current.next() - } - override def knownSize: Int = if 
(coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { - bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { - def iterator = new AbstractIterator[(El1, El2, El3, El4)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - private[this] var _current: (El1, El2, El3, El4) = _ - private def current = { - while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { - val e1 = elems1.next() - val e2 = elems2.next() - val e3 = elems3.next() - val e4 = elems4.next() - if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) - } - _current - } - def hasNext = current ne null - def next() = { - val c = current - if (c ne null) { - _current = null - c - } else Iterator.empty.next() - } - } - override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - }) - } - - def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - val elems4 = coll4.iterator - var res = false - - while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - - res - } - - def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) - - def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { - val elems1 = coll1.iterator - val elems2 = coll2.iterator - val elems3 = coll3.iterator - val elems4 = coll4.iterator - - while 
(elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) - f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - - private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { - def iterator = new AbstractIterator[(El1, El2, El3, El4)] { - private[this] val elems1 = coll1.iterator - private[this] val elems2 = coll2.iterator - private[this] val elems3 = coll3.iterator - private[this] val elems4 = coll4.iterator - def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext - def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) - } - override def knownSize: Int = zipKnownSize - override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty - } - - private def zipKnownSize: Int = { - val s1 = coll1.knownSize - if (s1 == 0) 0 else { - val s2 = coll2.knownSize - if (s2 == 0) 0 else { - val s3 = coll3.knownSize - if (s3 == 0) 0 else { - val s4 = coll4.knownSize - if (s4 == 0) 0 else s1 min s2 min s3 min s4 - } - } - } - } - - override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" -} - -object LazyZip4 { - implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = - zipped4.toIterable -} diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala index 8ab25a3c13e0..ef4f915ea573 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -18,15 +18,13 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure /** Base Map type */ trait Map[K, +V] extends Iterable[(K, V)] with MapOps[K, V, Map, Map[K, V]] with MapFactoryDefaults[K, V, Map, Iterable] - with Equals - with Pure { + with 
Equals { def mapFactory: scala.collection.MapFactory[Map] = Map @@ -104,9 +102,8 @@ trait Map[K, +V] trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends IterableOps[(K, V), Iterable, C] with PartialFunction[K, V] { - this: MapOps[K, V, CC, C]^ => - override def view: MapView[K, V]^{this} = new MapView.Id(this) + override def view: MapView[K, V] = new MapView.Id(this) /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { @@ -255,7 +252,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * the predicate `p`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) + def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. @@ -263,7 +260,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) + def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) /** Defines the default value computation for the map, * returned when a key is not found @@ -356,7 +353,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] @deprecated("Consider requiring an immutable Map.", "2.13.0") @`inline` def -- (keys: IterableOnce[K]^): C = { lazy val keysSet = keys.iterator.to(immutable.Set) - fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") @@ -377,17 +374,17 @@ object MapOps { */ @SerialVersionUID(3L) class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]]( - self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, + self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala 
b/tests/pos-special/stdlib/collection/MapView.scala deleted file mode 100644 index ac9e88466052..000000000000 --- a/tests/pos-special/stdlib/collection/MapView.scala +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.nowarn -import scala.collection.MapView.SomeMapOps -import scala.collection.mutable.Builder -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - -trait MapView[K, +V] - extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] - with View[(K, V)] { - this: MapView[K, V]^ => - - override def view: MapView[K, V]^{this} = this - - // Ideally this returns a `View`, but bincompat - /** Creates a view over all keys of this map. - * - * @return the keys of this map as a view. - */ - override def keys: Iterable[K]^{this} = new MapView.Keys(this) - - // Ideally this returns a `View`, but bincompat - /** Creates a view over all values of this map. - * - * @return the values of this map as a view. - */ - override def values: Iterable[V]^{this} = new MapView.Values(this) - - /** Filters this map by retaining only keys satisfying a predicate. - * @param p the predicate used to test keys - * @return an immutable map consisting only of those key value pairs of this map where the key satisfies - * the predicate `p`. The resulting map wraps the original map without copying any elements. - */ - override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) - - /** Transforms this map by applying a function to every retrieved value. - * @param f the function used to transform values of this map. 
- * @return a map view which maps every key of this map - * to `f(this(key))`. The resulting map wraps the original map without copying any elements. - */ - override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) - - override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) - - override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) - - override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) - - override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) - - def mapFactory: MapViewFactory = MapView - - override def empty: MapView[K, V] = mapFactory.empty - - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) - - override def toString: String = super[View].toString - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "MapView" -} - -object MapView extends MapViewFactory { - - /** An `IterableOps` whose collection type and collection type constructor are unknown */ - type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] - /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ - type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] - - @SerialVersionUID(3L) - object EmptyMapView extends AbstractMapView[Any, Nothing] { - // !!! 
cc problem: crash when we replace the line with - // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { - override def get(key: Any): Option[Nothing] = None - override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this - override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this - override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this - override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this - override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) - } - - @SerialVersionUID(3L) - class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { - def get(key: K): Option[V] = underlying.get(key) - def iterator: Iterator[(K, V)]^{this} = underlying.iterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - // Ideally this is public, but bincompat - @SerialVersionUID(3L) - private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { - def iterator: Iterator[K]^{this} = underlying.keysIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - // Ideally this is public, but bincompat - @SerialVersionUID(3L) - private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { - def iterator: Iterator[V]^{this} = underlying.valuesIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] { - def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) - def get(key: 
K): Option[W] = underlying.get(key).map(f) - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } - def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) - def get(key: K): Option[V] = underlying.get(key) match { - case s @ Some(v) if p((key, v)) != isFlipped => s - case _ => None - } - override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize - override def isEmpty: Boolean = iterator.isEmpty - } - - @SerialVersionUID(3L) - class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { - override def get(key: K): Option[V] = { - underlying.get(key) match { - case s @ Some(v) => - f((key, v)) - s - case None => None - } - } - override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) - - override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] - - override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = - View.from(it).unsafeAssumePure - // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, - // and the latter 
assumes maps are strict, so from's result captures nothing. - - override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { - case mv: MapView[K, V] => mv - case other => new MapView.Id(other) - } - - override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) -} - -trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { - - def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] - - def empty[X, Y]: MapView[X, Y] - - def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} - - override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) -} - -/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ -@SerialVersionUID(3L) -abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: - this: AbstractMapView[K, V]^ => - diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala deleted file mode 100644 index f5139422e24c..000000000000 --- a/tests/pos-special/stdlib/collection/Searching.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - -import scala.language.implicitConversions -import scala.collection.generic.IsSeq -import language.experimental.captureChecking - -object Searching { - - /** The result of performing a search on a sorted sequence - * - * Example usage: - * - * {{{ - * val list = List(1, 3, 4, 5) // list must be sorted before searching - * list.search(4) // Found(2) - * list.search(2) // InsertionPoint(1) - * }}} - * - * */ - sealed abstract class SearchResult { - /** The index corresponding to the element searched for in the sequence, if it was found, - * or the index where the element would be inserted in the sequence, if it was not in the sequence */ - def insertionPoint: Int - } - - /** The result of performing a search on a sorted sequence, where the element was found. - * - * @param foundIndex the index corresponding to the element searched for in the sequence - */ - case class Found(foundIndex: Int) extends SearchResult { - override def insertionPoint: Int = foundIndex - } - - /** The result of performing a search on a sorted sequence, where the element was not found - * - * @param insertionPoint the index where the element would be inserted in the sequence - */ - case class InsertionPoint(insertionPoint: Int) extends SearchResult - - @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") - class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal - - @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") - implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = - new SearchImpl(fr.conversion(coll)) -} diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index 365a1db1b849..caabf6fa6436 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ 
b/tests/pos-special/stdlib/collection/Seq.scala @@ -18,7 +18,6 @@ import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn import language.experimental.captureChecking import caps.unsafe.unsafeAssumePure -import scala.annotation.unchecked.uncheckedCaptures /** Base trait for sequence collections * @@ -78,12 +77,10 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) - def iterableFactory: FreeSeqFactory[CC] - /** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should * not be assumed to be efficient unless you have an `IndexedSeq`. */ @throws[IndexOutOfBoundsException] @@ -237,7 +234,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * * @return an iterator yielding the elements of this $coll in reversed order */ - override def reverseIterator: Iterator[A] = reversed.iterator + def reverseIterator: Iterator[A] = reversed.iterator /** Tests whether this $coll contains the given sequence at a given index. * @@ -601,8 +598,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => if (!hasNext) Iterator.empty.next() - val forcedElms = new mutable.ArrayBuffer[A @uncheckedCaptures](elms.size) ++= elms - // uncheckedCaptures OK since used only locally + val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms val result = (newSpecificBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) @@ -893,7 +889,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * part of the result, but any following occurrences will. 
*/ def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) fromSpecific(iterator.filter { x => var include = false occ.updateWith(x) { @@ -918,7 +914,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => * in the result, but any following occurrences will be omitted. */ def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) fromSpecific(iterator.filter { x => var include = true occ.updateWith(x) { @@ -966,7 +962,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => iterableFactory.from(new View.Updated(this, index, elem)) } - protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = { + protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { val occ = new mutable.HashMap[B, Int]() for (y <- sq) occ.updateWith(y) { case None => Some(1) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala deleted file mode 100644 index a7f2c629b61d..000000000000 --- a/tests/pos-special/stdlib/collection/SeqMap.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -import scala.annotation.nowarn - -/** - * A generic trait for ordered maps. Concrete classes have to provide - * functionality for the abstract methods in `SeqMap`. - * - * Note that when checking for equality [[SeqMap]] does not take into account - * ordering. - * - * @tparam K the type of the keys contained in this linked map. 
- * @tparam V the type of the values associated with the keys in this linked map. - * @define coll immutable seq map - * @define Coll `immutable.SeqMap` - */ - -trait SeqMap[K, +V] extends Map[K, V] - with MapOps[K, V, SeqMap, SeqMap[K, V]] - with MapFactoryDefaults[K, V, SeqMap, Iterable] { - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SeqMap" - - override def mapFactory: MapFactory[SeqMap] = SeqMap -} - -object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) - diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala deleted file mode 100644 index a4ca1143f8b4..000000000000 --- a/tests/pos-special/stdlib/collection/SeqView.scala +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.nowarn -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure -import scala.annotation.unchecked.uncheckedCaptures - -/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the - * necessary functionality over which SeqViews are defined, and at the same - * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is - * pure, whereas SeqViews are Iterables which can be impure (for instance, - * mapping a SeqView with an impure function gives an impure view). 
- */ -trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - self: SeqViewOps[A, CC, C]^ => - - def length: Int - def apply(x: Int): A - def appended[B >: A](elem: B): CC[B]^{this} - def prepended[B >: A](elem: B): CC[B]^{this} - def reverse: C^{this} - def sorted[B >: A](implicit ord: Ordering[B]): C^{this} - - def reverseIterator: Iterator[A]^{this} = reversed.iterator -} - -trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { - self: SeqView[A]^ => - - override def view: SeqView[A]^{this} = this - - override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f) - override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this) - override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this) - override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n) - override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n) - override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n) - override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n) - override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a }) - - def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this) - - override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SeqView" -} - -object SeqView { - - /** A `SeqOps` whose collection type and collection type constructor 
are unknown */ - private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _] - - /** A view that doesn’t apply any transformation to an underlying sequence */ - @SerialVersionUID(3L) - class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { - def apply(idx: Int): A = underlying.apply(idx) - def length: Int = underlying.length - def iterator: Iterator[A]^{this} = underlying.iterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { - def apply(idx: Int): B = f(underlying(idx)) - def length: Int = underlying.length - } - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] { - def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) - def length: Int = underlying.length + 1 - } - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] { - def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) - def length: Int = underlying.length + 1 - } - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] { - def apply(idx: Int): A = { - val l = prefix.length - if (idx < l) prefix(idx) else suffix(idx - l) - } - def length: Int = prefix.length + suffix.length - } - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { - def apply(i: Int) = underlying.apply(size - 1 - i) - def length = underlying.size - def iterator: Iterator[A]^{this} = underlying.reverseIterator - override def knownSize: Int = underlying.knownSize - override def isEmpty: Boolean = underlying.isEmpty - } - - @SerialVersionUID(3L) - class Take[+A](underlying: 
SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { - def apply(idx: Int): A = if (idx < n) { - underlying(idx) - } else { - throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") - } - def length: Int = underlying.length min normN - } - - @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { - private[this] val delta = (underlying.size - (n max 0)) max 0 - def length = underlying.size - delta - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i + delta) - } - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { - def length = (underlying.size - normN) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i + normN) - override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) - } - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { - private[this] val len = (underlying.size - (n max 0)) max 0 - def length = len - @throws[IndexOutOfBoundsException] - def apply(i: Int) = underlying.apply(i) - } - - @SerialVersionUID(3L) - class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, - private[this] val len: Int, - ord: Ordering[B]) - extends SeqView[A] { - outer: Sorted[A, B]^ => - - // force evaluation immediately by calling `length` so infinite collections - // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls - def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) - - @SerialVersionUID(3L) - private[this] class ReverseSorted extends SeqView[A] { - private[this] lazy val _reversed = new SeqView.Reverse(_sorted) - - def apply(i: Int): A = _reversed.apply(i) - 
def length: Int = len - def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy - override def knownSize: Int = len - override def isEmpty: Boolean = len == 0 - override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) - override def reverse: SeqView[A]^{outer} = outer - override protected def reversed: Iterable[A] = outer.unsafeAssumePure - - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = - if (ord1 == Sorted.this.ord) outer.unsafeAssumePure - else if (ord1.isReverseOf(Sorted.this.ord)) this - else new Sorted(elems, len, ord1) - } - - @volatile private[this] var evaluated = false - - private[this] lazy val _sorted: Seq[A] = { - val res = { - val len = this.len - if (len == 0) Nil - else if (len == 1) List(underlying.head) - else { - val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] - underlying.copyToArray(arr) - java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) - // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it - // is safe because: - // - the ArraySeq is immutable, and items that are not of type A - // cannot be added to it - // - we know it only contains items of type A (and if this collection - // contains items of another type, we'd get a CCE anyway) - // - the cast doesn't actually do anything in the runtime because the - // type of A is not known and Array[_] is Array[AnyRef] - immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]]) - } - } - evaluated = true - underlying = null - res - } - - private[this] def elems: SomeSeqOps[A]^{this} = { - val orig = underlying - if (evaluated) _sorted else orig - } - - def apply(i: Int): A = _sorted.apply(i) - def length: Int = len - def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy - override def knownSize: Int = len - override def isEmpty: Boolean = len == 0 - override def to[C1](factory: Factory[A, C1]): C1 
= _sorted.to(factory) - override def reverse: SeqView[A] = new ReverseSorted - // we know `_sorted` is either tiny or has efficient random access, - // so this is acceptable for `reversed` - override protected def reversed: Iterable[A] = new ReverseSorted - - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = - if (ord1 == this.ord) this - else if (ord1.isReverseOf(this.ord)) reverse - else new Sorted(elems, len, ord1) - } -} - -/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */ -@SerialVersionUID(3L) -abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A] diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala deleted file mode 100644 index a9c279b82a49..000000000000 --- a/tests/pos-special/stdlib/collection/Set.scala +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.util.hashing.MurmurHash3 -import java.lang.String - -import scala.annotation.nowarn -import language.experimental.captureChecking - -/** Base trait for set collections. - */ -trait Set[A] - extends Iterable[A] - with SetOps[A, Set, Set[A]] - with Equals - with IterableFactoryDefaults[A, Set] - with Pure { - self: Set[A] => - - def canEqual(that: Any) = true - - /** - * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if - * - the argument `that` is a `Set`, - * - the two sets have the same [[size]], and - * - for every `element` this set, `other.contains(element) == true`. 
- * - * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality - * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual` - * methods return `true`. - * - * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same - * element equivalence function in their lookup operation. For example, the element equivalence operation in a - * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads - * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2` - * (used for lookup in `HashSet`). - * - * {{{ - * scala> import scala.collection.immutable._ - * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ - * - * scala> TreeSet("A")(ord) == HashSet("a") - * val res0: Boolean = false - * - * scala> HashSet("a") == TreeSet("A")(ord) - * val res1: Boolean = true - * }}} - * - * - * @param that The set to which this set is compared - * @return `true` if the two sets are equal according to the description - */ - override def equals(that: Any): Boolean = - (this eq that.asInstanceOf[AnyRef]) || (that match { - case set: Set[A @unchecked] if set.canEqual(this) => - (this.size == set.size) && { - try this.subsetOf(set) - catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 - } - case _ => - false - }) - - override def hashCode(): Int = MurmurHash3.setHash(this) - - override def iterableFactory: IterableFactory[Set] = Set - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "Set" - - override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too -} - -/** Base trait for set operations - * - * @define coll set - * @define Coll `Set` - */ -trait SetOps[A, 
+CC[_], +C <: SetOps[A, CC, C]] - extends IterableOps[A, CC, C], (A -> Boolean) { self => - - def contains(elem: A): Boolean - - /** Tests if some element is contained in this set. - * - * This method is equivalent to `contains`. It allows sets to be interpreted as predicates. - * @param elem the element to test for membership. - * @return `true` if `elem` is contained in this set, `false` otherwise. - */ - @`inline` final def apply(elem: A): Boolean = this.contains(elem) - - /** Tests whether this set is a subset of another set. - * - * @param that the set to test. - * @return `true` if this set is a subset of `that`, i.e. if - * every element of this set is also an element of `that`. - */ - def subsetOf(that: Set[A]): Boolean = this.forall(that) - - /** An iterator over all subsets of this set of the given size. - * If the requested size is impossible, an empty iterator is returned. - * - * @param len the size of the subsets. - * @return the iterator. - */ - def subsets(len: Int): Iterator[C] = { - if (len < 0 || len > size) Iterator.empty - else new SubsetsItr(this.to(IndexedSeq), len) - } - - /** An iterator over all subsets of this set. - * - * @return the iterator. - */ - def subsets(): Iterator[C] = new AbstractIterator[C] { - private[this] val elms = SetOps.this.to(IndexedSeq) - private[this] var len = 0 - private[this] var itr: Iterator[C] = Iterator.empty - - def hasNext = len <= elms.size || itr.hasNext - def next() = { - if (!itr.hasNext) { - if (len > elms.size) Iterator.empty.next() - else { - itr = new SubsetsItr(elms, len) - len += 1 - } - } - - itr.next() - } - } - - /** An Iterator including all subsets containing exactly len elements. - * If the elements in 'This' type is ordered, then the subsets will also be in the same order. 
- * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} - * - * $willForceEvaluation - * - */ - private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] { - private[this] val idxs = Array.range(0, len+1) - private[this] var _hasNext = true - idxs(len) = elms.size - - def hasNext = _hasNext - @throws[NoSuchElementException] - def next(): C = { - if (!hasNext) Iterator.empty.next() - - val buf = newSpecificBuilder - idxs.slice(0, len) foreach (idx => buf += elms(idx)) - val result = buf.result() - - var i = len - 1 - while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 - - if (i < 0) _hasNext = false - else { - idxs(i) += 1 - for (j <- (i+1) until len) - idxs(j) = idxs(j-1) + 1 - } - - result - } - } - - /** Computes the intersection between this set and another set. - * - * @param that the set to intersect with. - * @return a new set consisting of all elements that are both in this - * set and in the given set `that`. - */ - def intersect(that: Set[A]): C = this.filter(that) - - /** Alias for `intersect` */ - @`inline` final def & (that: Set[A]): C = intersect(that) - - /** Computes the difference of this set and another set. - * - * @param that the set of elements to exclude. - * @return a set containing those elements of this - * set that are not also contained in the given set `that`. 
- */ - def diff(that: Set[A]): C - - /** Alias for `diff` */ - @`inline` final def &~ (that: Set[A]): C = this diff that - - @deprecated("Consider requiring an immutable Set", "2.13.0") - def -- (that: IterableOnce[A]): C = { - val toRemove = that.iterator.to(immutable.Set) - fromSpecific(view.filterNot(toRemove)) - } - - @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0") - def - (elem: A): C = diff(Set(elem)) - - @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0") - def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2) - - /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. - * - * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. - * - * Example: - * {{{ - * scala> val a = Set(1, 2) concat Set(2, 3) - * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3) - * }}} - * - * @param that the collection containing the elements to add. - * @return a new $coll with the given elements added, omitting duplicates. - */ - def concat(that: collection.IterableOnce[A]): C = this match { - case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) => - // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. 
PR #10036) - var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]] - val it = that.iterator - while (it.hasNext) result = result + it.next() - result.asInstanceOf[C] - case _ => fromSpecific(that match { - case that: collection.Iterable[A] => new View.Concat(this, that) - case _ => iterator.concat(that.iterator) - }) - } - - @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") - def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) - - /** Alias for `concat` */ - @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that) - - /** Computes the union between of set and another set. - * - * @param that the set to form the union with. - * @return a new set consisting of all elements that are in this - * set or in the given set `that`. - */ - @`inline` final def union(that: Set[A]): C = concat(that) - - /** Alias for `union` */ - @`inline` final def | (that: Set[A]): C = concat(that) -} - -/** - * $factoryInfo - * @define coll set - * @define Coll `Set` - */ -@SerialVersionUID(3L) -object Set extends IterableFactory.Delegate[Set](immutable.Set) - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ -abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala deleted file mode 100644 index 7b9381ebb078..000000000000 --- a/tests/pos-special/stdlib/collection/SortedMap.scala +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.{implicitNotFound, nowarn} -import language.experimental.captureChecking - -/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ -trait SortedMap[K, +V] - extends Map[K, V] - with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] - with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ - - def unsorted: Map[K, V] = this - - def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SortedMap" - - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => - (sm canEqual this) && - (this.size == sm.size) && { - val i1 = this.iterator - val i2 = sm.iterator - var allEqual = true - while (allEqual && i1.hasNext) { - val kv1 = i1.next() - val kv2 = i2.next() - allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 - } - allEqual - } - case _ => super.equals(that) - } -} - -trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends MapOps[K, V, Map, C] - with SortedOps[K, C] - with Pure { - - /** The companion object of this sorted map, providing various factory methods. - * - * @note When implementing a custom collection type and refining `CC` to the new type, this - * method needs to be overridden to return a factory for the new type (the compiler will - * issue an error otherwise). - */ - def sortedMapFactory: SortedMapFactory[CC] - - /** Similar to `mapFromIterable`, but returns a SortedMap collection type. 
- * Note that the return type is now `CC[K2, V2]`. - */ - @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) - - def unsorted: Map[K, V] - - /** - * Creates an iterator over all the key/value pairs - * contained in this map having a key greater than or - * equal to `start` according to the ordering of - * this map. x.iteratorFrom(y) is equivalent - * to but often more efficient than x.from(y).iterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def iteratorFrom(start: K): Iterator[(K, V)] - - /** - * Creates an iterator over all the keys(or elements) contained in this - * collection greater than or equal to `start` - * according to the ordering of this collection. x.keysIteratorFrom(y) - * is equivalent to but often more efficient than - * x.from(y).keysIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def keysIteratorFrom(start: K): Iterator[K] - - /** - * Creates an iterator over all the values contained in this - * map that are associated with a key greater than or equal to `start` - * according to the ordering of this map. x.valuesIteratorFrom(y) is - * equivalent to but often more efficient than - * x.from(y).valuesIterator. - * - * @param start The lower bound (inclusive) - * on the keys to be returned - */ - def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) - - def firstKey: K = head._1 - def lastKey: K = last._1 - - /** Find the element with smallest key larger than or equal to a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption - - /** Find the element with largest key less than a given key. - * @param key The given key. - * @return `None` if there is no such node. 
- */ - def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption - - def rangeTo(to: K): C = { - val i = keySet.rangeFrom(to).iterator - if (i.isEmpty) return coll - val next = i.next() - if (ordering.compare(next, to) == 0) - if (i.isEmpty) coll - else rangeUntil(i.next()) - else - rangeUntil(next) - } - - override def keySet: SortedSet[K] = new KeySortedSet - - /** The implementation class of the set returned by `keySet` */ - protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { - def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) - def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { - val map = SortedMapOps.this.rangeImpl(from, until) - new map.KeySortedSet - } - } - - /** A generic trait that is reused by sorted keyset implementations */ - protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => - implicit def ordering: Ordering[K] = SortedMapOps.this.ordering - def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) - } - - // And finally, we add new overloads taking an ordering - /** Builds a new sorted map by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - */ - def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) - - /** Builds a new sorted map by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. 
- */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.FlatMap(this, f)) - - /** Builds a new sorted map by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Collect(this, pf)) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(this, it) - case _ => iterator.concat(suffix.iterator) - })(ordering) - - /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) -} - -object SortedMapOps { - private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
- - /** Specializes `MapWithFilter` for sorted Map collections - * - * @define coll sorted map collection - */ - class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( - self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], - p: ((K, V)) => Boolean - ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { - - def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = - self.sortedMapFactory.from(new View.Map(filtered, f)) - - def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = - self.sortedMapFactory.from(new View.FlatMap(filtered, f)) - - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = - new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) - - } - -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala deleted file mode 100644 index 16751d86d9d5..000000000000 --- a/tests/pos-special/stdlib/collection/SortedOps.scala +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import language.experimental.captureChecking - -/** Base trait for sorted collections */ -trait SortedOps[A, +C] { - - def ordering: Ordering[A] - - /** Returns the first key of the collection. */ - def firstKey: A - - /** Returns the last key of the collection. */ - def lastKey: A - - /** Comparison function that orders keys. 
*/ - @deprecated("Use ordering.compare instead", "2.13.0") - @deprecatedOverriding("Use ordering.compare instead", "2.13.0") - @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) - - /** Creates a ranged projection of this collection. Any mutations in the - * ranged projection will update this collection and vice versa. - * - * Note: keys are not guaranteed to be consistent between this collection - * and the projection. This is the case for buffers where indexing is - * relative to the projection. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * `None` if there is no lower bound. - * @param until The upper-bound (exclusive) of the ranged projection. - * `None` if there is no upper bound. - */ - def rangeImpl(from: Option[A], until: Option[A]): C - - /** Creates a ranged projection of this collection with both a lower-bound - * and an upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - * @param until The upper-bound (exclusive) of the ranged projection. - */ - def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - @deprecated("Use rangeFrom", "2.13.0") - final def from(from: A): C = rangeFrom(from) - - /** Creates a ranged projection of this collection with no upper-bound. - * - * @param from The lower-bound (inclusive) of the ranged projection. - */ - def rangeFrom(from: A): C = rangeImpl(Some(from), None) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. - */ - @deprecated("Use rangeUntil", "2.13.0") - final def until(until: A): C = rangeUntil(until) - - /** Creates a ranged projection of this collection with no lower-bound. - * - * @param until The upper-bound (exclusive) of the ranged projection. 
- */ - def rangeUntil(until: A): C = rangeImpl(None, Some(until)) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. - */ - @deprecated("Use rangeTo", "2.13.0") - final def to(to: A): C = rangeTo(to) - - /** Create a range projection of this collection with no lower-bound. - * @param to The upper-bound (inclusive) of the ranged projection. - */ - def rangeTo(to: A): C -} diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala deleted file mode 100644 index fb2f879edcd2..000000000000 --- a/tests/pos-special/stdlib/collection/SortedSet.scala +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection - -import scala.annotation.{implicitNotFound, nowarn} -import scala.annotation.unchecked.uncheckedVariance -import language.experimental.captureChecking - -/** Base type of sorted sets */ -trait SortedSet[A] extends Set[A] - with SortedSetOps[A, SortedSet, SortedSet[A]] - with SortedSetFactoryDefaults[A, SortedSet, Set] { - - def unsorted: Set[A] = this - - def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix: String = "SortedSet" - - override def equals(that: Any): Boolean = that match { - case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => - (ss canEqual this) && - (this.size == ss.size) && { - val i1 = this.iterator - val i2 = ss.iterator - var allEqual = true - while (allEqual && i1.hasNext) - allEqual = ordering.equiv(i1.next(), i2.next()) - allEqual - } - case _ => - super.equals(that) - } - -} - -trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SetOps[A, Set, C] - with SortedOps[A, C] { - - /** The companion object of this sorted set, providing various factory methods. - * - * @note When implementing a custom collection type and refining `CC` to the new type, this - * method needs to be overridden to return a factory for the new type (the compiler will - * issue an error otherwise). - */ - def sortedIterableFactory: SortedIterableFactory[CC] - - def unsorted: Set[A] - - /** - * Creates an iterator that contains all values from this collection - * greater than or equal to `start` according to the ordering of - * this collection. 
x.iteratorFrom(y) is equivalent to but will usually - * be more efficient than x.from(y).iterator - * - * @param start The lower-bound (inclusive) of the iterator - */ - def iteratorFrom(start: A): Iterator[A] - - @deprecated("Use `iteratorFrom` instead.", "2.13.0") - @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) - - def firstKey: A = head - def lastKey: A = last - - /** Find the smallest element larger than or equal to a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def minAfter(key: A): Option[A] = rangeFrom(key).headOption - - /** Find the largest element less than a given key. - * @param key The given key. - * @return `None` if there is no such node. - */ - def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption - - override def min[B >: A](implicit ord: Ordering[B]): A = - if (isEmpty) throw new UnsupportedOperationException("empty.min") - else if (ord == ordering) head - else if (ord isReverseOf ordering) last - else super.min[B] // need the type annotation for it to infer the correct implicit - - override def max[B >: A](implicit ord: Ordering[B]): A = - if (isEmpty) throw new UnsupportedOperationException("empty.max") - else if (ord == ordering) last - else if (ord isReverseOf ordering) head - else super.max[B] // need the type annotation for it to infer the correct implicit - - def rangeTo(to: A): C = { - val i = rangeFrom(to).iterator - if (i.isEmpty) return coll - val next = i.next() - if (ordering.compare(next, to) == 0) - if (i.isEmpty) coll - else rangeUntil(i.next()) - else - rangeUntil(next) - } - - /** Builds a new sorted collection by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. 
- */ - def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Map(this, f)) - - /** Builds a new sorted collection by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.FlatMap(this, f)) - - /** Returns a $coll formed from this $coll and another iterable collection - * by combining corresponding elements in pairs. - * If one of the two collections is longer than the other, its remaining elements are ignored. - * - * @param that The iterable providing the second half of each result pair - * @tparam B the type of the second half of the returned pairs - * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. - * The length of the returned collection is the minimum of the lengths of this $coll and `that`. - */ - def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote - sortedIterableFactory.from(that match { - case that: Iterable[B] => new View.Zip(this, that) - case _ => iterator.zip(that) - }) - - /** Builds a new sorted collection by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @tparam B the element type of the returned collection. 
- * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. - */ - def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Collect(this, pf)) -} - -object SortedSetOps { - private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." - private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." - - /** Specialize `WithFilter` for sorted collections - * - * @define coll sorted collection - */ - class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( - self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], - p: A => Boolean - ) extends IterableOps.WithFilter[A, IterableCC](self, p) { - - def map[B : Ordering](f: A => B): CC[B] = - self.sortedIterableFactory.from(new View.Map(filtered, f)) - - def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = - self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) - - override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} = - new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) - } - -} - -@SerialVersionUID(3L) -object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) - diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala deleted file mode 100644 index 0a0ac0075990..000000000000 --- a/tests/pos-special/stdlib/collection/Stepper.scala +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} -import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} -import java.{lang => jl} -import language.experimental.captureChecking - -import scala.collection.Stepper.EfficientSplit - -/** Steppers exist to enable creating Java streams over Scala collections, see - * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections - * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. - * - * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference - * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are - * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). - * These enable iterating over collections holding unboxed primitives (e.g., Arrays, - * [[scala.jdk.Accumulator]]s) without boxing the elements. - * - * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized - * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) - * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). - * - * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive - * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. - * - * @tparam A the element type of the Stepper - */ -trait Stepper[@specialized(Double, Int, Long) +A] { - this: Stepper[A]^ => - - /** Check if there's an element available. 
*/ - def hasStep: Boolean - - /** Return the next element and advance the stepper */ - def nextStep(): A - - /** Split this stepper, if applicable. The elements of the current Stepper are split up between - * the resulting Stepper and the current stepper. - * - * May return `null`, in which case the current Stepper yields the same elements as before. - * - * See method `trySplit` in [[java.util.Spliterator]]. - */ - def trySplit(): Stepper[A] - - /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See - * method `estimateSize` in [[java.util.Spliterator]]. - */ - def estimateSize: Long - - /** Returns a set of characteristics of this Stepper and its elements. See method - * `characteristics` in [[java.util.Spliterator]]. - */ - def characteristics: Int - - /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. - * - * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning - * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] - * (which is a `Stepper[Int]`). - */ - def spliterator[B >: A]: Spliterator[_] - - /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. - * - * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning - * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass - * [[IntStepper]] (which is a `Stepper[Int]`). - */ - def javaIterator[B >: A]: JIterator[_] - - /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to - * primitive Steppers box the elements. - */ - def iterator: Iterator[A] = new AbstractIterator[A] { - def hasNext: Boolean = hasStep - def next(): A = nextStep() - } -} - -object Stepper { - /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time - * and space complexity, and that the division is likely to be reasonably even. 
Steppers marked - * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method - * defined in [[scala.jdk.StreamConverters]]. - */ - trait EfficientSplit - - private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") - - /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. - * This provides a basis for more efficient stream processing on unboxed values provided that the original source - * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided - * (see for example IntArrayStepper and WidenedByteArrayStepper). */ - - private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingDoubleStepper(s) - } - } - - private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingIntStepper(s) - } - } - - private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Long = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): LongStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingLongStepper(s) - } - } - - private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def 
nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingByteStepper(s) - } - } - - private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingCharStepper(s) - } - } - - private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): IntStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingShortStepper(s) - } - } - - private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): DoubleStepper = { - val s = st.trySplit() - if (s == null) null else new UnboxingFloatStepper(s) - } - } -} - -/** A Stepper for arbitrary element types. See [[Stepper]]. 
*/ -trait AnyStepper[+A] extends Stepper[A] { - this: AnyStepper[A]^ => - - def trySplit(): AnyStepper[A] - - def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) - - def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { - def hasNext: Boolean = hasStep - def next(): B = nextStep() - } -} - -object AnyStepper { - class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { - def tryAdvance(c: Consumer[_ >: A]): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A]^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: A]): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - } - - def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) - def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit - - def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) - def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit - - def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) - def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit - - private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { - def hasStep: Boolean = st.hasStep - def nextStep(): Double = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Double] = { - val s = st.trySplit() - if (s == 
null) null else new BoxedDoubleStepper(s) - } - } - - private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { - def hasStep: Boolean = st.hasStep - def nextStep(): Int = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Int] = { - val s = st.trySplit() - if (s == null) null else new BoxedIntStepper(s) - } - } - - private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { - def hasStep: Boolean = st.hasStep - def nextStep(): Long = st.nextStep() - def estimateSize: Long = st.estimateSize - def characteristics: Int = st.characteristics - def trySplit(): AnyStepper[Long] = { - val s = st.trySplit() - if (s == null) null else new BoxedLongStepper(s) - } - } -} - -/** A Stepper for Ints. See [[Stepper]]. */ -trait IntStepper extends Stepper[Int] { - this: IntStepper^ => - - def trySplit(): IntStepper - - def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) - - def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { - def hasNext: Boolean = hasStep - def nextInt(): Int = nextStep() - } -} -object IntStepper { - class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { - def tryAdvance(c: IntConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { - case ic: IntConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for 
efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: IntConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { - case ic: IntConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } - } - } -} - -/** A Stepper for Doubles. See [[Stepper]]. */ -trait DoubleStepper extends Stepper[Double] { - this: DoubleStepper^ => - def trySplit(): DoubleStepper - - def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) - - def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { - def hasNext: Boolean = hasStep - def nextDouble(): Double = nextStep() - } -} - -object DoubleStepper { - class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { - def tryAdvance(c: DoubleConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { - case ic: DoubleConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: DoubleConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of 
tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { - case ic: DoubleConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } - } - } -} - -/** A Stepper for Longs. See [[Stepper]]. */ -trait LongStepper extends Stepper[Long] { - this: LongStepper^ => - - def trySplit(): LongStepper^{this} - - def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) - - def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { - def hasNext: Boolean = hasStep - def nextLong(): Long = nextStep() - } -} - -object LongStepper { - class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { - def tryAdvance(c: LongConsumer): Boolean = - if (s.hasStep) { c.accept(s.nextStep()); true } else false - // Override for efficiency: don't wrap the function and call the `tryAdvance` overload - override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { - case ic: LongConsumer => tryAdvance(ic) - case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false - } - // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong^{this} = { - val sp = s.trySplit() - if (sp == null) null else sp.spliterator - } - def estimateSize(): Long = s.estimateSize - def characteristics(): Int = s.characteristics - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: LongConsumer): Unit = - while (s.hasStep) { c.accept(s.nextStep()) } - // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance - override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { - case ic: LongConsumer => forEachRemaining(ic) - case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } - } - } -} diff --git 
a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala deleted file mode 100644 index c6b520400d89..000000000000 --- a/tests/pos-special/stdlib/collection/StepperShape.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import java.{lang => jl} - -import language.experimental.captureChecking -import scala.collection.Stepper.EfficientSplit - -/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly - * specialized Stepper `S` according to the element type `T`. - */ -sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { - /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ - def shape: StepperShape.Shape - - /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. - * This is an identity operation for reference shapes. */ - def seqUnbox(st: AnyStepper[T]): S - - /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. - * This is an identity operation for reference shapes. 
*/ - def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit -} - -object StepperShape extends StepperShapeLowPriority1 { - class Shape private[StepperShape] (private val s: Int) extends AnyVal - - // reference - val ReferenceShape = new Shape(0) - - // primitive - val IntShape = new Shape(1) - val LongShape = new Shape(2) - val DoubleShape = new Shape(3) - - // widening - val ByteShape = new Shape(4) - val ShortShape = new Shape(5) - val CharShape = new Shape(6) - val FloatShape = new Shape(7) - - implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { - def shape = IntShape - def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) - def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit - } - implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] - - implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { - def shape = LongShape - def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) - def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit - } - implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] - - implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { - def shape = DoubleShape - def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) - def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit - } - implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] 
= doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] - - implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { - def shape = ByteShape - def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) - def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit - } - implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] - - implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { - def shape = ShortShape - def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) - def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit - } - implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] - - implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { - def shape = CharShape - def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) - def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit - } - implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] - - implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { - def shape = FloatShape - def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) - def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit - } - 
implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] -} - -trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { - implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] -} - -trait StepperShapeLowPriority2 { - implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] - - protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { - def shape = StepperShape.ReferenceShape - def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st - def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st - } -} \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala deleted file mode 100644 index a9c5e0af43b3..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** - * Trait that overrides map operations to take advantage of strict builders. 
- * - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] - extends MapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] - with Pure { - - override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = - strictOptimizedMap(mapFactory.newBuilder, f) - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = - strictOptimizedFlatMap(mapFactory.newBuilder, f) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(suffix, mapFactory.newBuilder) - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = - strictOptimizedCollect(mapFactory.newBuilder, pf) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { - val b = mapFactory.newBuilder[K, V1] - b ++= this - b += elem1 - b += elem2 - if (elems.nonEmpty) b ++= elems - b.result() - } -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index bfea9eda8bd3..50ddbca30f9e 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -12,20 +12,19 @@ package scala.collection import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations on sequences in order * to take advantage of strict builders. 
*/ trait StrictOptimizedSeqOps [+A, +CC[_], +C] - extends Any + extends AnyRef with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { override def distinctBy[B](f: A -> B): C = { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B @uncheckedCaptures] + val seen = mutable.HashSet.empty[B] val it = this.iterator while (it.hasNext) { val next = it.next() @@ -80,7 +79,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def diff[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) coll else { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { @@ -98,7 +97,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def intersect[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) empty else { - val occ = occCounts[B @uncheckedCaptures](that) + val occ = occCounts(that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala deleted file mode 100644 index 8ed337fff998..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** - * Trait that overrides set operations to take advantage of strict builders. 
- * - * @tparam A Elements type - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] - extends SetOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def concat(that: IterableOnce[A]): C = - strictOptimizedConcat(that, newSpecificBuilder) - -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala deleted file mode 100644 index 9a9e6e367922..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection - -import scala.annotation.implicitNotFound -import language.experimental.captureChecking - -/** - * Trait that overrides sorted map operations to take advantage of strict builders. 
- * - * @tparam K Type of keys - * @tparam V Type of values - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends SortedMapOps[K, V, CC, C] - with StrictOptimizedMapOps[K, V, Map, C] { - - override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedMap(sortedMapFactory.newBuilder, f) - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) - - override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - strictOptimizedCollect(sortedMapFactory.newBuilder, pf) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { - val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] - if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] - } -} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala deleted file mode 100644 index ded7deabccca..000000000000 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.implicitNotFound -import scala.annotation.unchecked.uncheckedVariance - -/** - * Trait that overrides sorted set operations to take advantage of strict builders. - * - * @tparam A Elements type - * @tparam CC Collection type constructor - * @tparam C Collection type - */ -trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SortedSetOps[A, CC, C] - with StrictOptimizedSetOps[A, Set, C] { - - override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedMap(sortedIterableFactory.newBuilder, f) - - override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) - - override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = - strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) - - override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) - -} diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index 3e3e2f8d872e..f570531def98 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -964,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal { else if (s.equalsIgnoreCase("false")) false else throw new IllegalArgumentException("For input string: \""+s+"\"") - def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] = + def toArray[B >: Char](implicit 
tag: ClassTag[B]): Array[B] = if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]] else new WrappedString(s).toArray[B] diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala deleted file mode 100644 index 47281815da71..000000000000 --- a/tests/pos-special/stdlib/collection/StringParsers.scala +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection - -import scala.annotation.tailrec -import language.experimental.captureChecking - -/** A module containing the implementations of parsers from strings to numeric types, and boolean - */ -private[scala] object StringParsers { - - //compile-time constant helpers - - //Int.MinValue == -2147483648 - private final val intOverflowBoundary = -214748364 - private final val intOverflowDigit = 9 - //Long.MinValue == -9223372036854775808L - private final val longOverflowBoundary = -922337203685477580L - private final val longOverflowDigit = 9 - - @inline - private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) - - @inline - private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { - @tailrec - def rec(i: Int, agg: Int): Option[Int] = - if (agg < min) None - else if (i == len) { - if (!isPositive) Some(agg) - else if (agg == min) None - else Some(-agg) - } - else { - val digit = decValue(from.charAt(i)) - if (digit == -1) None - else rec(i + 1, agg * 10 - digit) - } - rec(1, agg) - } - - @inline - private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' - - //bool - @inline - final def parseBool(from: String): Option[Boolean] = - if 
(from.equalsIgnoreCase("true")) Some(true) - else if (from.equalsIgnoreCase("false")) Some(false) - else None - - //integral types - final def parseByte(from: String): Option[Byte] = { - val len = from.length() - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v.toByte) - else None - } - else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) - else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) - else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) - else None - } - } - - final def parseShort(from: String): Option[Short] = { - val len = from.length() - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v.toShort) - else None - } - else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) - else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) - else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) - else None - } - } - - final def parseInt(from: String): Option[Int] = { - val len = from.length() - - @tailrec - def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { - if (i == len) { - if (!isPositive) Some(agg) - else if (agg == Int.MinValue) None - else Some(-agg) - } - else if (agg < intOverflowBoundary) None - else { - val digit = decValue(from.charAt(i)) - if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None - else step(i + 1, (agg * 10) - digit, isPositive) - } - } - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first) - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v) - else None - } - else 
if (v > -1) step(1, -v, true) - else if (first == '+') step(1, 0, true) - else if (first == '-') step(1, 0, false) - else None - } - } - - final def parseLong(from: String): Option[Long] = { - //like parseInt, but Longer - val len = from.length() - - @tailrec - def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { - if (i == len) { - if (isPositive && agg == Long.MinValue) None - else if (isPositive) Some(-agg) - else Some(agg) - } - else if (agg < longOverflowBoundary) None - else { - val digit = decValue(from.charAt(i)) - if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None - else step(i + 1, agg * 10 - digit, isPositive) - } - } - //empty strings parse to None - if (len == 0) None - else { - val first = from.charAt(0) - val v = decValue(first).toLong - if (len == 1) { - //"+" and "-" parse to None - if (v > -1) Some(v) - else None - } - else if (v > -1) step(1, -v, true) - else if (first == '+') step(1, 0, true) - else if (first == '-') step(1, 0, false) - else None - } - } - - //floating point - final def checkFloatFormat(format: String): Boolean = { - //indices are tracked with a start index which points *at* the first index - //and an end index which points *after* the last index - //so that slice length === end - start - //thus start == end <=> empty slice - //and format.substring(start, end) is equivalent to the slice - - //some utilities for working with index bounds into the original string - @inline - def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { - @tailrec - def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) - rec(start) - } - - //one after last index for the predicate to hold, or `from` if none hold - //may point after the end of the string - @inline - def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { - @tailrec @inline - def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) - else i - rec(from) - 
} - - - def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { - def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || - (ch >= 'a' && ch <= 'f') || - (ch >= 'A' && ch <= 'F')) - - def prefixOK(startIndex: Int, endIndex: Int): Boolean = { - val len = endIndex - startIndex - (len > 0) && { - //the prefix part is - //hexDigits - //hexDigits. - //hexDigits.hexDigits - //.hexDigits - //but not . - if (format.charAt(startIndex) == '.') { - (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) - } else { - val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) - (noLeading >= endIndex) || - ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) - } - } - } - - def postfixOK(startIndex: Int, endIndex: Int): Boolean = - (startIndex < endIndex) && { - (forAllBetween(startIndex, endIndex, isDigit)) || { - val startchar = format.charAt(startIndex) - (startchar == '+' || startchar == '-') && - (endIndex - startIndex > 1) && - forAllBetween(startIndex + 1, endIndex, isDigit) - } - } - // prefix [pP] postfix - val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) - (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) - } - - def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { - //invariant: endIndex > startIndex - - def isExp(c: Char): Boolean = c == 'e' || c == 'E' - - def expOK(startIndex: Int, endIndex: Int): Boolean = - (startIndex < endIndex) && { - val startChar = format.charAt(startIndex) - if (startChar == '+' || startChar == '-') - (endIndex > (startIndex + 1)) && - skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex - else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex - } - - //significant can be one of - //* digits.digits - //* .digits - //* digits. - //but not just . 
- val startChar = format.charAt(startIndex) - if (startChar == '.') { - val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) - // a digit is required followed by optional exp - (noSignificant > startIndex + 1) && (noSignificant >= endIndex || - isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) - ) - } - else if (isDigit(startChar)) { - // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent - val noInt = skipIndexWhile(isDigit, startIndex, endIndex) - // just the digits - (noInt == endIndex) || { - if (format.charAt(noInt) == '.') { - val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) - (noSignificant >= endIndex) || //no exponent - isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) - } else - isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) - } - } - else false - } - - //count 0x00 to 0x20 as "whitespace", and nothing else - val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) - val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 - - if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false - else { - //all formats can have a sign - val unsigned = { - val startchar = format.charAt(unspacedStart) - if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart - } - if (unsigned >= unspacedEnd) false - //that's it for NaN and Infinity - else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" - else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" - else { - //all other formats can have a format suffix - val desuffixed = { - val endchar = format.charAt(unspacedEnd - 1) - if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 - else unspacedEnd - } - val len = desuffixed - unsigned - if (len <= 0) false - else if (len >= 2 && (format.charAt(unsigned + 
1) == 'x' || format.charAt(unsigned + 1) == 'X')) - format.charAt(unsigned) == '0' && isHexFloatLiteral(unsigned + 2, desuffixed) - else isDecFloatLiteral(unsigned, desuffixed) - } - } - } - - @inline - def parseFloat(from: String): Option[Float] = - if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) - else None - - @inline - def parseDouble(from: String): Option[Double] = - if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) - else None - -} diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index d91fc0c49939..85910311a4c3 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -78,7 +78,7 @@ object View extends IterableFactory[View] { def empty[A]: View[A] = Empty - def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) override def apply[A](xs: A*): View[A] = new Elems(xs: _*) diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala deleted file mode 100644 index 0f3830e9fe25..000000000000 --- a/tests/pos-special/stdlib/collection/WithFilter.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - -/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods - * of trait `Iterable`. - * - * @tparam A Element type (e.g. `Int`) - * @tparam CC Collection type constructor (e.g. 
`List`) - * - * @define coll collection - */ -@SerialVersionUID(3L) -abstract class WithFilter[+A, +CC[_]] extends Serializable { - this: WithFilter[A, CC]^ => - - /** Builds a new collection by applying a function to all elements of the - * `filtered` outer $coll. - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying - * the given function `f` to each element of the filtered outer $coll - * and collecting the results. - */ - def map[B](f: A => B): CC[B]^{this, f} - - /** Builds a new collection by applying a function to all elements of the - * `filtered` outer $coll containing this `WithFilter` instance that satisfy - * - * @param f the function to apply to each element. - * @tparam B the element type of the returned collection. - * @return a new $coll resulting from applying - * the given collection-valued function `f` to each element - * of the filtered outer $coll and - * concatenating the results. - */ - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} - - /** Applies a function `f` to all elements of the `filtered` outer $coll. - * - * @param f the function that is applied for its side-effect to every element. - * The result of function `f` is discarded. - * - * @tparam U the type parameter describing the result of function `f`. - * This result will always be ignored. Typically `U` is `Unit`, - * but this is not necessary. - */ - def foreach[U](f: A => U): Unit - - /** Further refines the filter for this `filtered` $coll. - * - * @param q the predicate used to test elements. - * @return an object of class `WithFilter`, which supports - * `map`, `flatMap`, `foreach`, and `withFilter` operations. - * All these operations apply to those elements of this $coll which - * also satisfy both `p` and `q` predicates. 
- */ - def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} - -} diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala deleted file mode 100644 index d985dad2edc5..000000000000 --- a/tests/pos-special/stdlib/collection/concurrent/Map.scala +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.concurrent - -import language.experimental.captureChecking -import scala.annotation.tailrec - -/** A template trait for mutable maps that allow concurrent access. - * - * $concurrentmapinfo - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] - * section on `Concurrent Maps` for more information. - * - * @tparam K the key type of the map - * @tparam V the value type of the map - * - * @define Coll `concurrent.Map` - * @define coll concurrent map - * @define concurrentmapinfo - * This is a base trait for all Scala concurrent map implementations. It - * provides all of the methods a `Map` does, with the difference that all the - * changes are atomic. It also describes methods specific to concurrent maps. - * - * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. - * - * @define atomicop - * This is an atomic operation. - */ -trait Map[K, V] extends scala.collection.mutable.Map[K, V] { - - /** - * Associates the given key with a given value, unless the key was already - * associated with some other value. 
- * - * $atomicop - * - * @param k key with which the specified value is to be associated with - * @param v value to be associated with the specified key - * @return `Some(oldvalue)` if there was a value `oldvalue` previously - * associated with the specified key, or `None` if there was no - * mapping for the specified key - */ - def putIfAbsent(k: K, v: V): Option[V] - - /** - * Removes the entry for the specified key if it's currently mapped to the - * specified value. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - def remove(k: K, v: V): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldvalue value expected to be associated with the specified key - * if replacing is to happen - * @param newvalue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - def replace(k: K, oldvalue: V, newvalue: V): Boolean - - /** - * Replaces the entry for the given key only if it was previously mapped - * to some value. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param v value to be associated with the specified key - * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise - */ - def replace(k: K, v: V): Option[V] - - override def getOrElseUpdate(key: K, op: => V): V = get(key) match { - case Some(v) => v - case None => - val v = op - putIfAbsent(key, v) match { - case Some(ov) => ov - case None => v - } - } - - /** - * Removes the entry for the specified key if it's currently mapped to the - * specified value. Comparison to the specified value is done using reference - * equality. 
- * - * Not all map implementations can support removal based on reference - * equality, and for those implementations, object equality is used instead. - * - * $atomicop - * - * @param k key for which the entry should be removed - * @param v value expected to be associated with the specified key if - * the removal is to take place - * @return `true` if the removal took place, `false` otherwise - */ - // TODO: make part of the API in a future version - private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) - - /** - * Replaces the entry for the given key only if it was previously mapped to - * a given value. Comparison to the specified value is done using reference - * equality. - * - * Not all map implementations can support replacement based on reference - * equality, and for those implementations, object equality is used instead. - * - * $atomicop - * - * @param k key for which the entry should be replaced - * @param oldValue value expected to be associated with the specified key - * if replacing is to happen - * @param newValue value to be associated with the specified key - * @return `true` if the entry was replaced, `false` otherwise - */ - // TODO: make part of the API in a future version - private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). - * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). - * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. 
- * - * @param key the key value - * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping - * @return the new value associated with the specified key - */ - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) - - @tailrec - private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = get(key) - val nextValue = remappingFunction(previousValue) - previousValue match { - case Some(prev) => nextValue match { - case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue - case _ => if (removeRefEq(key, prev)) return None - } - case _ => nextValue match { - case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue - case _ => return None - } - } - updateWithAux(key)(remappingFunction) - } - - private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { - val it = iterator - while (it.hasNext) { - val (k, v) = it.next() - if (!p(k, v)) removeRefEq(k, v) - } - this - } - - private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { - val it = iterator - while (it.hasNext) { - val (k, v) = it.next() - replaceRefEq(k, v, f(k, v)) - } - this - } -} diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala deleted file mode 100644 index f76619a004fa..000000000000 --- a/tests/pos-special/stdlib/collection/generic/BitOperations.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package generic -import language.experimental.captureChecking - - -/** Some bit operations. - * - * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for - * an explanation of unsignedCompare. - */ -private[collection] object BitOperations { - trait Int { - type Int = scala.Int - def zero(i: Int, mask: Int) = (i & mask) == 0 - def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix - def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) - def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) - def complement(i: Int) = (-1) ^ i - def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) - def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) - } - object Int extends Int - - trait Long { - type Long = scala.Long - def zero(i: Long, mask: Long) = (i & mask) == 0L - def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) - def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix - def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) - def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) - def complement(i: Long) = (-1L) ^ i - def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) - def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep - def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) - } - object Long extends Long -} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala deleted file mode 100644 index 7eba9433b8d5..000000000000 --- a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Scala 
(https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.generic - -import java.io.{ObjectInputStream, ObjectOutputStream} - -import scala.collection.{Factory, Iterable} -import scala.collection.mutable.Builder -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** The default serialization proxy for collection implementations. - * - * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` - * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed - * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any - * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
- */ -@SerialVersionUID(3L) -final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { - - @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _ - // @uncheckedCaptures OK since builder is used only locally when reading objects - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - val k = coll.knownSize - out.writeInt(k) - var count = 0 - coll.foreach { x => - out.writeObject(x) - count += 1 - } - if(k >= 0) { - if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") - } else out.writeObject(SerializeEnd) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - builder = factory.newBuilder - val k = in.readInt() - if(k >= 0) { - builder.sizeHint(k) - var count = 0 - while(count < k) { - builder += in.readObject().asInstanceOf[A] - count += 1 - } - } else { - while (true) in.readObject match { - case SerializeEnd => return - case a => builder += a.asInstanceOf[A] - } - } - } - - protected[this] def readResolve(): Any = builder.result() -} - -@SerialVersionUID(3L) -private[collection] case object SerializeEnd - -/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type - * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or - * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement - * it directly without using this trait if you need a non-standard factory or if you want to use a different - * serialization scheme. 
- */ -trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => - protected[this] def writeReplace(): AnyRef = { - val f: Factory[Any, Any] = this match { - case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] - case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] - case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) - case it => it.iterableFactory.iterableFactory - } - new DefaultSerializationProxy(f, this) - } -} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala deleted file mode 100644 index c309299b615b..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsIterable.scala +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic -import language.experimental.captureChecking - -/** A trait which can be used to avoid code duplication when defining extension - * methods that should be applicable both to existing Scala collections (i.e., - * types extending `Iterable`) as well as other (potentially user-defined) - * types that could be converted to a Scala collection type. This trait - * makes it possible to treat Scala collections and types that can be implicitly - * converted to a collection type uniformly. 
For example, one can provide - * extension methods that work both on collection types and on `String`s (`String`s - * do not extend `Iterable`, but can be converted to `Iterable`) - * - * `IsIterable` provides three members: - * - * 1. type member `A`, which represents the element type of the target `Iterable[A]` - * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type - * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. - * - * ===Usage=== - * - * One must provide `IsIterable` as an implicit parameter type of an implicit - * conversion. Its usage is shown below. Our objective in the following example - * is to provide a generic extension method `mapReduce` to any type that extends - * or can be converted to `Iterable`. In our example, this includes - * `String`. - * - * {{{ - * import scala.collection.{Iterable, IterableOps} - * import scala.collection.generic.IsIterable - * - * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { - * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { - * val iter = it(coll).iterator - * var res = mapper(iter.next()) - * while (iter.hasNext) - * res = reducer(res, mapper(iter.next())) - * res - * } - * } - * - * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = - * new ExtensionMethods(coll, it) - * - * // See it in action! - * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 - * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 - *}}} - * - * Here, we begin by creating a class `ExtensionMethods` which contains our - * `mapReduce` extension method. 
- * - * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where - * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. - * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to - * call the `iterator` method on it. - * The remaining of the implementation is straightforward. - * - * The `withExtensions` implicit conversion makes the `mapReduce` operation available - * on any type `Repr` for which it exists an implicit `IsIterable[Repr]` instance. - * Note how we keep track of the precise type of the implicit `it` argument by using the - * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that - * so that the information carried by the type members `A` and `C` of the `it` argument - * is not lost. - * - * When the `mapReduce` method is called on some type of which it is not - * a member, implicit search is triggered. Because implicit conversion - * `withExtensions` is generic, it will be applied as long as an implicit - * value of type `IsIterable[Repr]` can be found. Given that the - * `IsIterable` companion object contains implicit members that return values of type - * `IsIterable`, this requirement is typically satisfied, and the chain - * of interactions described in the previous paragraph is set into action. - * (See the `IsIterable` companion object, which contains a precise - * specification of the available implicits.) - * - * ''Note'': Currently, it's not possible to combine the implicit conversion and - * the class with the extension methods into an implicit class due to - * limitations of type inference. - * - * ===Implementing `IsIterable` for New Types=== - * - * One must simply provide an implicit value of type `IsIterable` - * specific to the new type, or an implicit conversion which returns an - * instance of `IsIterable` specific to the new type. 
- * - * Below is an example of an implementation of the `IsIterable` trait - * where the `Repr` type is `Range`. - * - *{{{ - * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = - * new IsIterable[Range] { - * type A = Int - * type C = IndexedSeq[Int] - * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll - * } - *}}} - * - * (Note that in practice the `IsIterable[Range]` instance is already provided by - * the standard library, and it is defined as an `IsSeq[Range]` instance) - */ -trait IsIterable[Repr] extends IsIterableOnce[Repr] { - - /** The type returned by transformation operations that preserve the same elements - * type (e.g. `filter`, `take`). - * - * In practice, this type is often `Repr` itself, excepted in the case - * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. - */ - type C - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) - - /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ - def apply(coll: Repr): IterableOps[A, Iterable, C] - -} - -object IsIterable extends IsIterableLowPriority { - - // Straightforward case: IterableOps subclasses - implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = - new IsIterable[CC0[A0]] { - type A = A0 - type C = CC0[A0] - def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll - } - - // The `BitSet` type can not be unified with the `CC0` parameter of - // the above definition because it does not take a type parameter. 
- // Hence the need for a separate case: - implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = - new IsIterable[C0] { - type A = Int - type C = C0 - def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll - } - -} - -trait IsIterableLowPriority { - - // Makes `IsSeq` instances visible in `IsIterable` companion - implicit def isSeqLikeIsIterable[Repr](implicit - isSeqLike: IsSeq[Repr] - ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike - - // Makes `IsMap` instances visible in `IsIterable` companion - implicit def isMapLikeIsIterable[Repr](implicit - isMapLike: IsMap[Repr] - ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala deleted file mode 100644 index 2836ca2bb520..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package generic -import language.experimental.captureChecking - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `IterableOnce[A]`. - * - * This type enables simple enrichment of `IterableOnce`s with extension - * methods which can make full use of the mechanics of the Scala collections - * framework in their implementation. 
- * - * Example usage, - * {{{ - * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { - * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { - * val b = bf.newBuilder(coll) - * for(e <- it(coll).iterator) f(e) foreach (b +=) - * b.result() - * } - * } - * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = - * new FilterMapImpl(coll, it) - * - * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) - * // == List(2, 4) - * }}} - */ -trait IsIterableOnce[Repr] { - - /** The type of elements we can traverse over (e.g. `Int`). */ - type A - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - val conversion: Repr => IterableOnce[A] = apply(_) - - /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */ - def apply(coll: Repr): IterableOnce[A] - -} - -object IsIterableOnce extends IsIterableOnceLowPriority { - - // Straightforward case: IterableOnce subclasses - implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = - new IsIterableOnce[CC0[A0]] { - type A = A0 - def apply(coll: CC0[A0]): IterableOnce[A0] = coll - } - -} - -trait IsIterableOnceLowPriority { - - // Makes `IsIterable` instance visible in `IsIterableOnce` companion - implicit def isIterableLikeIsIterableOnce[Repr](implicit - isIterableLike: IsIterable[Repr] - ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala deleted file mode 100644 index ad7254d2dd61..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsMap.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic - -import IsMap.Tupled -import scala.collection.immutable.{IntMap, LongMap} -import language.experimental.captureChecking - -/** - * Type class witnessing that a collection type `Repr` - * has keys of type `K`, values of type `V` and has a conversion to - * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. - * - * This type enables simple enrichment of `Map`s with extension methods. - * - * @see [[scala.collection.generic.IsIterable]] - * @tparam Repr Collection type (e.g. `Map[Int, String]`) - */ -trait IsMap[Repr] extends IsIterable[Repr] { - - /** The type of keys */ - type K - - /** The type of values */ - type V - - type A = (K, V) - - /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` - * - * @note The third type parameter of the returned `MapOps` value is - * still `Iterable` (and not `Map`) because `MapView[K, V]` only - * extends `MapOps[K, V, View, View[A]]`. - */ - override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] - -} - -object IsMap { - - /** Convenient type level function that takes a unary type constructor `F[_]` - * and returns a binary type constructor that tuples its parameters and passes - * them to `F`. - * - * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. 
- */ - type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } - - // Map collections - implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = - new IsMap[CC0[K0, V0]] { - type K = K0 - type V = V0 - type C = CC0[K0, V0] - def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c - } - - // MapView - implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = - new IsMap[CC0[K0, V0]] { - type K = K0 - type V = V0 - type C = View[(K, V)] - def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c - } - - // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition - implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = - new IsMap[mutable.AnyRefMap[K0, V0]] { - type K = K0 - type V = V0 - type C = mutable.AnyRefMap[K0, V0] - def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c - } - - // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters - implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = - new IsMap[IntMap[V0]] { - type K = Int - type V = V0 - type C = IntMap[V0] - def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c - } - - // LongMap is in a similar situation as IntMap - implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = - new IsMap[LongMap[V0]] { - type K = Long - type V = V0 - type C = LongMap[V0] - def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c - } - - // mutable.LongMap is in a similar situation as LongMap and IntMap - implicit def mutableLongMapIsMap[V0]: 
IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = - new IsMap[mutable.LongMap[V0]] { - type K = Long - type V = V0 - type C = mutable.LongMap[V0] - def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c - } - - -} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala deleted file mode 100644 index 8ad344c4d4fc..000000000000 --- a/tests/pos-special/stdlib/collection/generic/IsSeq.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package generic - -import scala.reflect.ClassTag -import language.experimental.captureChecking -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Type class witnessing that a collection representation type `Repr` has - * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for - * some types `A` and `C`. - * - * This type enables simple enrichment of `Seq`s with extension methods which - * can make full use of the mechanics of the Scala collections framework in - * their implementation. - * - * @see [[scala.collection.generic.IsIterable]] - */ -trait IsSeq[Repr] extends IsIterable[Repr] { - - @deprecated("'conversion' is now a method named 'apply'", "2.13.0") - override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) - - /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` - * - * @note The second type parameter of the returned `SeqOps` value is - * still `Iterable` (and not `Seq`) because `SeqView[A]` only - * extends `SeqOps[A, View, View[A]]`. 
- */ - def apply(coll: Repr): SeqOps[A, Iterable, C] -} - -object IsSeq { - - private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = - new IsSeq[Seq[Any]] { - type A = Any - type C = Any - def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll - } - - implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = - seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] - - /** !!! Under cc, views are not Seqs and can't use SeqOps. - * So this should be renamed to seqViewIsIterable - */ - implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = - new IsIterable[CC0[A0]] { - type A = A0 - type C = View[A] - def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll - } - - /** !!! Under cc, views are not Seqs and can't use SeqOps. - * So this should be renamed to stringViewIsIterable - */ - implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = - new IsIterable[StringView] { - type A = Char - type C = View[Char] - def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll - } - - implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = - new IsSeq[String] { - type A = Char - type C = String - def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = - new SeqOps[Char, immutable.ArraySeq, String] { - def length: Int = s.length - def apply(i: Int): Char = s.charAt(i) - def toIterable: Iterable[Char] = new immutable.WrappedString(s) - protected[this] def coll: String = s - protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString - def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged - override def empty: String = "" - protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder - def iterator: Iterator[Char] = s.iterator - } - } - - implicit def 
arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = - new IsSeq[Array[A0]] { - type A = A0 - type C = Array[A0] - def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = - new SeqOps[A, mutable.ArraySeq, Array[A]] { - def apply(i: Int): A = a(i) - def length: Int = a.length - def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a) - protected def coll: Array[A] = a - protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) - def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged - override def empty: Array[A] = Array.empty[A] - protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder - def iterator: Iterator[A] = a.iterator - } - } - - // `Range` can not be unified with the `CC0` parameter of the - // `seqOpsIsSeq` definition because it does not take a type parameter. - // Hence the need for a separate case: - implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = - new IsSeq[C0] { - type A = Int - type C = immutable.IndexedSeq[Int] - def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll - } - -} diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala deleted file mode 100644 index 2c0967dbaf4b..000000000000 --- a/tests/pos-special/stdlib/collection/generic/Subtractable.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package generic -import language.experimental.captureChecking - -/** This trait represents collection-like objects that can be reduced - * using a '+' operator. It defines variants of `-` and `--` - * as convenience methods in terms of single-element removal `-`. - * - * @tparam A the type of the elements of the $coll. - * @tparam Repr the type of the $coll itself - * @define coll collection - * @define Coll Subtractable - */ -@deprecated("Subtractable is deprecated. This is now implemented as part of SetOps, MapOps, etc.", "2.13.0") -trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => - - /** The representation object of type `Repr` which contains the collection's elements - */ - protected def repr: Repr - - /** Creates a new $coll from this $coll with an element removed. - * @param elem the element to remove - * @return a new collection that contains all elements of the current $coll - * except one less occurrence of `elem`. - */ - def -(elem: A): Repr - - /** Creates a new $coll from this $coll with some elements removed. - * - * This method takes two or more elements to be removed. Another overloaded - * variant of this method handles the case where a single element is - * removed. - * @param elem1 the first element to remove. - * @param elem2 the second element to remove. - * @param elems the remaining elements to remove. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the given elements. - */ - def -(elem1: A, elem2: A, elems: A*): Repr = - this - elem1 - elem2 -- elems - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * @param xs the collection containing the removed elements. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the elements of `elems`. 
- */ - def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _) -} diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala deleted file mode 100644 index 0ba67c1bf76e..000000000000 --- a/tests/pos-special/stdlib/collection/generic/package.scala +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - - -package object generic { - @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") - type Clearable = scala.collection.mutable.Clearable - - @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") - type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] - - @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") - type Growable[-A] = scala.collection.mutable.Growable[A] - - @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") - type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] - - @deprecated("Use IsIterable instead", "2.13.0") - type IsTraversableLike[Repr] = IsIterable[Repr] - - @deprecated("Use IsIterableOnce instead", "2.13.0") - type IsTraversableOnce[Repr] = IsIterableOnce[Repr] -} diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala deleted file mode 100644 index 3a221fc76b6c..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala +++ /dev/null @@ -1,692 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import java.util.Arrays - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.Stepper.EfficientSplit -import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} -import scala.collection.convert.impl._ -import scala.reflect.ClassTag -import scala.runtime.ScalaRunTime -import scala.util.Sorting -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** - * An immutable array. - * - * Supports efficient indexed access and has a small memory footprint. - * - * @define coll immutable array - * @define Coll `ArraySeq` - */ -sealed abstract class ArraySeq[+A] - extends AbstractSeq[A] - with IndexedSeq[A] - with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] - with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] - with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] - with Serializable - with Pure { - - /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - protected def elemTag: ClassTag[_] - - override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged - - /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break - * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. - * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an - * array of a supertype or subtype of the element type. 
*/ - def unsafeArray: Array[_] - - def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]] - - protected def evidenceIterableFactory: ArraySeq.type = ArraySeq - protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]] - - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit - - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): A - - override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = { - val dest = new Array[Any](length) - Array.copy(unsafeArray, 0, dest, 0, length) - dest(index) = elem - ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] - } - - override def map[B](f: A => B): ArraySeq[B] = { - val a = new Array[Any](size) - var i = 0 - while (i < a.length){ - a(i) = f(apply(i)) - i += 1 - } - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } - - override def prepended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]] - - override def appended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] - - /** Fast concatenation of two [[ArraySeq]]s. - * - * @return null if optimisation not possible. 
- */ - private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = { - // Optimise concatenation of two ArraySeqs - // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast - if (isEmpty) - that - else if (that.isEmpty) - this - else { - val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]] - val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]] - val mismatch = thisIsObj != thatIsObj - if (mismatch) - // Combining primatives and objects: abort - null - else if (thisIsObj) { - // A and B are objects - val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]] - val len = ax.length + ay.length - val a = new Array[AnyRef](len) - System.arraycopy(ax, 0, a, 0, ax.length) - System.arraycopy(ay, 0, a, ax.length, ay.length) - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } else { - // A is a primative and B = A. Use this instance's protected ClassTag. - val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] - val len = ax.length + ay.length - val a = iterableEvidence.newArray(len) - System.arraycopy(ax, 0, a, 0, ax.length) - System.arraycopy(ay, 0, a, ax.length, ay.length) - ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] - } - } - } - - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { - def genericResult = { - val k = suffix.knownSize - if (k == 0) this - else { - val b = ArrayBuilder.make[Any] - if(k >= 0) b.sizeHint(k + unsafeArray.length) - b.addAll(unsafeArray) - b.addAll(suffix) - ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] - } - } - - suffix match { - case that: ArraySeq[_] => - val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) - if (result == null) genericResult - else result - case _ => - genericResult - } - } - - override def prependedAll[B >: A](prefix: 
collection.IterableOnce[B]^): ArraySeq[B] = { - def genericResult = { - val k = prefix.knownSize - if (k == 0) this - else { - val b = ArrayBuilder.make[Any] - if(k >= 0) b.sizeHint(k + unsafeArray.length) - b.addAll(prefix) - if(k < 0) b.sizeHint(b.length + unsafeArray.length) - b.addAll(unsafeArray) - ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] - } - } - - prefix match { - case that: ArraySeq[_] => - val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) - if (result == null) genericResult - else result - case _ => - genericResult - } - } - - override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = - that match { - case bs: ArraySeq[B] => - ArraySeq.tabulate(length min bs.length) { i => - (apply(i), bs(i)) - } - case _ => - strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) - } - - private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs) - - override def take(n: Int): ArraySeq[A] = - if (unsafeArray.length <= n) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] - - override def takeRight(n: Int): ArraySeq[A] = - if (unsafeArray.length <= n) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] - - override def drop(n: Int): ArraySeq[A] = - if (n <= 0) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] - - override def dropRight(n: Int): ArraySeq[A] = - if (n <= 0) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] - - override def slice(from: Int, until: Int): ArraySeq[A] = - if (from <= 0 && unsafeArray.length <= until) - this - else - ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] - - override def foldLeft[B](z: B)(f: (B, A) => B): B = { - // For ArraySeqs with sizes of 
[100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast - // as the same while-loop over this instead of unsafeArray. - val array = unsafeArray - var b = z - var i = 0 - while (i < array.length) { - val a = array(i).asInstanceOf[A] - b = f(b, a) - i += 1 - } - b - } - - override def foldRight[B](z: B)(f: (A, B) => B): B = { - // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast - // as the same while-loop over this instead of unsafeArray. - val array = unsafeArray - var b = z - var i = array.length - while (i > 0) { - i -= 1 - val a = array(i).asInstanceOf[A] - b = f(a, b) - } - b - } - - override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] - - override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] - - override protected[this] def className = "ArraySeq" - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(unsafeArray, 0, xs, start, copied) - } - copied - } - - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = - if(unsafeArray.length <= 1) this - else { - val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) - Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) - new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] - } -} - -/** - * $factoryInfo - * @define coll immutable array - * @define Coll `ArraySeq` - */ -@SerialVersionUID(3L) -object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => - val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) - - private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) - - def empty[A : ClassTag]: ArraySeq[A] = emptyImpl - - def from[A](it: 
scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { - case as: ArraySeq[A] => as - case _ => unsafeWrapArray(Array.from[A](it)) - } - - def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = - ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray)) - - override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) - - override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { - val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0)) - var i = 0 - while (i < n) { - ScalaRunTime.array_update(elements, i, f(i)) - i = i + 1 - } - ArraySeq.unsafeWrapArray(elements) - } - - /** - * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type - * without copying. Any changes to wrapped array will break the expected immutability. - * - * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a - * `ClassCastException` at runtime. 
- */ - def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { - def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): T = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any): Boolean = that match { - case that: ofRef[_] => - Array.equals( - this.unsafeArray.asInstanceOf[Array[AnyRef]], - that.unsafeArray.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { - if(unsafeArray.length <= 1) this - else { - val a = unsafeArray.clone() - Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) - new ArraySeq.ofRef(a) - } - } - override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) - else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { - protected def elemTag = ClassTag.Byte - 
def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Byte = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = - if(length <= 1) this - else if(ord eq Ordering.Byte) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofByte(a) - } else super.sorted[B] - override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Byte](elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Byte](elem: B): ArraySeq[B] = - elem match { - case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { - protected def elemTag = ClassTag.Short - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Short = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofShort => 
Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = - if(length <= 1) this - else if(ord eq Ordering.Short) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofShort(a) - } else super.sorted[B] - override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Short](elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Short](elem: B): ArraySeq[B] = - elem match { - case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { - protected def elemTag = ClassTag.Char - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Char = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = - if(length <= 1) this - else if(ord eq Ordering.Char) { - val a = unsafeArray.clone() - Arrays.sort(a) - new 
ArraySeq.ofChar(a) - } else super.sorted[B] - override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Char](elem: B): ArraySeq[B] = - elem match { - case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Char](elem: B): ArraySeq[B] = - elem match { - case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = - (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) - } - - @SerialVersionUID(3L) - final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { - protected def elemTag = ClassTag.Int - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Int = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = - if(length <= 1) this - else if(ord eq Ordering.Int) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofInt(a) - } else super.sorted[B] - override def iterator: Iterator[Int] = new 
ArrayOps.ArrayIterator[Int](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Int](elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Int](elem: B): ArraySeq[B] = - elem match { - case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { - protected def elemTag = ClassTag.Long - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Long = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = - if(length <= 1) this - else if(ord eq Ordering.Long) { - val a = unsafeArray.clone() - Arrays.sort(a) - new ArraySeq.ofLong(a) - } else super.sorted[B] - override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new 
LongArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Long](elem: B): ArraySeq[B] = - elem match { - case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Long](elem: B): ArraySeq[B] = - elem match { - case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { - protected def elemTag = ClassTag.Float - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Float = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Float](elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: 
Float](elem: B): ArraySeq[B] = - elem match { - case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { - protected def elemTag = ClassTag.Double - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Double = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) - else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) - ).asInstanceOf[S with EfficientSplit] - override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Double](elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Double](elem: B): ArraySeq[B] = - elem match { - case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { - protected def elemTag = ClassTag.Boolean - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Boolean = unsafeArray(i) - override def hashCode = 
MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) - case _ => super.equals(that) - } - override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = - if(length <= 1) this - else if(ord eq Ordering.Boolean) { - val a = unsafeArray.clone() - Sorting.stableSort(a) - new ArraySeq.ofBoolean(a) - } else super.sorted[B] - override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = - new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] - override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) - case _ => super.updated(index, elem) - } - override def appended[B >: Boolean](elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) - case _ => super.appended(elem) - } - override def prepended[B >: Boolean](elem: B): ArraySeq[B] = - elem match { - case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) - case _ => super.prepended(elem) - } - } - - @SerialVersionUID(3L) - final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { - protected def elemTag = ClassTag.Unit - def length: Int = unsafeArray.length - @throws[ArrayIndexOutOfBoundsException] - def apply(i: Int): Unit = unsafeArray(i) - override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) - override def equals(that: Any) = that match { - case that: ofUnit => unsafeArray.length == that.unsafeArray.length - case _ => super.equals(that) - } - override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = 
- new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala deleted file mode 100644 index 9c2bfdad54d0..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/BitSet.scala +++ /dev/null @@ -1,376 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import BitSetOps.{LogWL, updateArray} -import mutable.Builder -import scala.annotation.{implicitNotFound, nowarn} -import language.experimental.captureChecking - -/** A class for immutable bitsets. - * $bitsetinfo - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] - * section on `Immutable BitSets` for more information. 
- * - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -sealed abstract class BitSet - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - override def unsorted: Set[Int] = this - - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory = BitSet - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) - - def incl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) this - else { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - } - - def excl(elem: Int): BitSet = { - require(elem >= 0, "bitset element must be >= 0") - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } else this - } - - /** Update word at index `idx`; enlarge set if `idx` outside range of set. 
- */ - protected def updateWord(idx: Int, w: Long): BitSet - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) -} - -/** - * $factoryInfo - * @define Coll `immutable.BitSet` - * @define coll immutable bitset - */ -@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = - it match { - case bs: BitSet => bs - case _ => (newBuilder ++= it).result() - } - - final val empty: BitSet = new BitSet1(0L) - - def newBuilder: Builder[Int, BitSet] = - mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) - - private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: 
Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSetN(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. - */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else if (len == 1) new BitSet1(elems(0)) - else if (len == 2) createSmall(elems(0), elems(1)) - else new BitSetN(elems) - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet1(val elems: Long) extends BitSet { - protected[collection] def nwords = 1 - protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet1(w) - else if (idx == 1) createSmall(elems, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case _ => - val newElems = elems & ~bs.word(0) - if (newElems == 0L) this.empty else new BitSet1(newElems) - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) - if (_elems == 0L) this.empty else new BitSet1(_elems) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { - protected[collection] def nwords = 2 - protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L - protected[collection] def updateWord(idx: Int, w: Long): BitSet = - if (idx == 0) new BitSet2(w, elems1) - else if (idx == 1) 
createSmall(elems0, w) - else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) - - - override def diff(other: collection.Set[Int]): BitSet = other match { - case bs: collection.BitSet => bs.nwords match { - case 0 => this - case 1 => - new BitSet2(elems0 & ~bs.word(0), elems1) - case _ => - val _elems0 = elems0 & ~bs.word(0) - val _elems1 = elems1 & ~bs.word(1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } else { - new BitSet1(_elems0) - } - } else { - new BitSet2(_elems0, _elems1) - } - } - case _ => super.diff(other) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) - val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) - - if (_elems1 == 0L) { - if (_elems0 == 0L) { - this.empty - } - else new BitSet1(_elems0) - } - else new BitSet2(_elems0, _elems1) - } - } - - @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") - class BitSetN(val elems: Array[Long]) extends BitSet { - protected[collection] def nwords = elems.length - - protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L - - protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. Two extra concerns for optimization are described below. - * - * Array Shrinking: - * If `this` is not longer than `bs`, then since we must iterate through the full array of words, - * we can track the new highest index word which is non-zero, at little additional cost. 
At the end, the new - * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` - * - * Tracking Changes: - * If the two sets are disjoint, then we can return `this`. Therefor, until at least one change is detected, - * we check each word for if it has changed from its corresponding word in `this`. Once a single change is - * detected, we stop checking because the cost of the new Array must be paid anyways. - */ - - val bsnwords = bs.nwords - val thisnwords = nwords - if (bsnwords >= thisnwords) { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = thisnwords - 1 - var currentWord = 0L - // if there are never any changes, we can return `this` at the end - var anyChanges = false - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - i match { - case -1 => - if (anyChanges) { - if (currentWord == 0) { - this.empty - } else { - new BitSet1(currentWord) - } - } else { - this - } - case 0 => - val oldFirstWord = word(0) - val firstWord = oldFirstWord & ~bs.word(0) - anyChanges ||= firstWord != oldFirstWord - if (anyChanges) { - new BitSet2(firstWord, currentWord) - } else { - this - } - case _ => - val minimumNonZeroIndex: Int = i + 1 - while (!anyChanges && i >= 0) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = word(i) & ~bs.word(i) - i -= 1 - } - new BitSetN(newArray) - } else { - this - } - } - } else { - var i = bsnwords - 1 - var anyChanges = false - var currentWord = 0L - while (i >= 0 && !anyChanges) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - anyChanges ||= currentWord != oldWord - i 
-= 1 - } - if (anyChanges) { - val newElems = elems.clone() - newElems(i + 1) = currentWord - while (i >= 0) { - newElems(i) = word(i) & ~bs.word(i) - i -= 1 - } - this.fromBitMaskNoCopy(newElems) - } else { - this - } - } - case _ => super.diff(that) - } - - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = nwords - 1 - var currentWord = 0L - // if there are never any changes, we can return `this` at the end - var anyChanges = false - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - i match { - case -1 => - if (anyChanges) { - if (currentWord == 0) { - this.empty - } else { - new BitSet1(currentWord) - } - } else { - this - } - case 0 => - val oldFirstWord = word(0) - val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) - anyChanges ||= firstWord != oldFirstWord - if (anyChanges) { - new BitSet2(firstWord, currentWord) - } else { - this - } - case _ => - val minimumNonZeroIndex: Int = i + 1 - while (!anyChanges && i >= 0) { - val oldWord = word(i) - currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) - anyChanges ||= currentWord != oldWord - i -= 1 - } - if (anyChanges) { - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - i -= 1 - } - new BitSetN(newArray) - } else { - this - } - } - } - - override def toBitMask: Array[Long] = elems.clone() - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = 
BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala deleted file mode 100644 index fc9bcb022874..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.immutable - - -import java.lang.Integer.bitCount -import java.lang.Math.ceil -import java.lang.System.arraycopy -import language.experimental.captureChecking - -private[collection] object Node { - final val HashCodeLength = 32 - - final val BitPartitionSize = 5 - - final val BitPartitionMask = (1 << BitPartitionSize) - 1 - - final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt - - final val BranchingFactor = 1 << BitPartitionSize - - final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask - - final def bitposFrom(mask: Int): Int = 1 << mask - - final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) - - final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) - -} - -private[collection] abstract class Node[T <: Node[T]] { - - def hasNodes: Boolean - - def nodeArity: Int - - def getNode(index: Int): T - - def hasPayload: Boolean - - def payloadArity: Int - - def getPayload(index: Int): Any - - def getHash(index: Int): Int - - def cachedJavaKeySetHashCode: Int - - private final def arrayIndexOutOfBounds(as: Array[_], ix:Int): ArrayIndexOutOfBoundsException = - new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1}") - - protected final def 
removeElement(as: Array[Int], ix: Int): Array[Int] = { - if (ix < 0) throw arrayIndexOutOfBounds(as, ix) - if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) - val result = new Array[Int](as.length - 1) - arraycopy(as, 0, result, 0, ix) - arraycopy(as, ix + 1, result, ix, as.length - ix - 1) - result - } - - protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = { - if (ix < 0) throw arrayIndexOutOfBounds(as, ix) - if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) - val result = new Array[Any](as.length - 1) - arraycopy(as, 0, result, 0, ix) - arraycopy(as, ix + 1, result, ix, as.length - ix - 1) - result - } - - protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { - if (ix < 0) throw arrayIndexOutOfBounds(as, ix) - if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) - val result = new Array[Int](as.length + 1) - arraycopy(as, 0, result, 0, ix) - result(ix) = elem - arraycopy(as, ix, result, ix + 1, as.length - ix) - result - } - protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = { - if (ix < 0) throw arrayIndexOutOfBounds(as, ix) - if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) - val result = new Array[Any](as.length + 1) - arraycopy(as, 0, result, 0, ix) - result(ix) = elem - arraycopy(as, ix, result, ix + 1, as.length - ix) - result - } -} - -/** - * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a - * depth-first pre-order traversal, which yields first all payload elements of the current - * node before traversing sub-nodes (left to right). - * - * @tparam T the trie node type we are iterating over - */ -private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { - - import Node.MaxDepth - - // Note--this code is duplicated to a large extent both in - // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. 
- // If you change this code, check those also in case they also - // need to be modified. - - protected var currentValueCursor: Int = 0 - protected var currentValueLength: Int = 0 - protected var currentValueNode: T = _ - - private[this] var currentStackLevel: Int = -1 - private[this] var nodeCursorsAndLengths: Array[Int] = _ - private[this] var nodes: Array[T] = _ - private def initNodes(): Unit = { - if (nodeCursorsAndLengths eq null) { - nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) - nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] - } - } - - def this(rootNode: T) = { - this() - if (rootNode.hasNodes) pushNode(rootNode) - if (rootNode.hasPayload) setupPayloadNode(rootNode) - } - - private final def setupPayloadNode(node: T): Unit = { - currentValueNode = node - currentValueCursor = 0 - currentValueLength = node.payloadArity - } - - private final def pushNode(node: T): Unit = { - initNodes() - currentStackLevel = currentStackLevel + 1 - - val cursorIndex = currentStackLevel * 2 - val lengthIndex = currentStackLevel * 2 + 1 - - nodes(currentStackLevel) = node - nodeCursorsAndLengths(cursorIndex) = 0 - nodeCursorsAndLengths(lengthIndex) = node.nodeArity - } - - private final def popNode(): Unit = { - currentStackLevel = currentStackLevel - 1 - } - - /** - * Searches for next node that contains payload values, - * and pushes encountered sub-nodes on a stack for depth-first traversal. 
- */ - private final def searchNextValueNode(): Boolean = { - while (currentStackLevel >= 0) { - val cursorIndex = currentStackLevel * 2 - val lengthIndex = currentStackLevel * 2 + 1 - - val nodeCursor = nodeCursorsAndLengths(cursorIndex) - val nodeLength = nodeCursorsAndLengths(lengthIndex) - - if (nodeCursor < nodeLength) { - nodeCursorsAndLengths(cursorIndex) += 1 - - val nextNode = nodes(currentStackLevel).getNode(nodeCursor) - - if (nextNode.hasNodes) { pushNode(nextNode) } - if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } - } else { - popNode() - } - } - - return false - } - - final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() - -} - -/** - * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base - * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). - * - * @tparam T the trie node type we are iterating over - */ -private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { - - import Node.MaxDepth - - protected var currentValueCursor: Int = -1 - protected var currentValueNode: T = _ - - private[this] var currentStackLevel: Int = -1 - private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) - private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] - - def this(rootNode: T) = { - this() - pushNode(rootNode) - searchNextValueNode() - } - - private final def setupPayloadNode(node: T): Unit = { - currentValueNode = node - currentValueCursor = node.payloadArity - 1 - } - - private final def pushNode(node: T): Unit = { - currentStackLevel = currentStackLevel + 1 - - nodeStack(currentStackLevel) = node - nodeIndex(currentStackLevel) = node.nodeArity - 1 - } - - private final def popNode(): Unit = { - currentStackLevel = currentStackLevel - 1 - } - - /** - * Searches for rightmost node that contains payload values, - * and pushes encountered sub-nodes on a stack for 
depth-first traversal. - */ - private final def searchNextValueNode(): Boolean = { - while (currentStackLevel >= 0) { - val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 - - if (nodeCursor >= 0) { - val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) - pushNode(nextNode) - } else { - val currNode = nodeStack(currentStackLevel) - popNode() - - if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } - } - } - - return false - } - - final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() - -} diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala deleted file mode 100644 index c364924db3a3..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/HashMap.scala +++ /dev/null @@ -1,2425 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.immutable - -import java.lang.Integer.bitCount -import java.lang.System.arraycopy - -import scala.annotation.unchecked.{uncheckedVariance => uV} -import scala.collection.Hashing.improve -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable, mutable.ReusableBuilder -import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} -import scala.runtime.AbstractFunction2 -import scala.runtime.Statics.releaseFence -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. 
- * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. - * - * @tparam K the type of the keys contained in this hash set. - * @tparam V the type of the values associated with the keys in this hash map. - * - * @define Coll `immutable.HashMap` - * @define coll immutable champ hash map - */ - -final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V]) - extends AbstractMap[K, V] - with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] - with MapFactoryDefaults[K, V, HashMap, Iterable] - with DefaultSerializable { - - def this() = this(MapNode.empty) - - // This release fence is present because rootNode may have previously been mutated during construction. - releaseFence() - - override def mapFactory: MapFactory[HashMap] = HashMap - - override def knownSize: Int = rootNode.size - - override def size: Int = rootNode.size - - override def isEmpty: Boolean = rootNode.size == 0 - - override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet - - private final class HashKeySet extends ImmutableKeySet { - - private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = - if (newHashMap eq HashMap.this) this else newHashMap.keySet - private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = - if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet - - override def incl(elem: K): Set[K] = { - val originalHash = elem.## - val improvedHash = improve(originalHash) - val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) - newKeySetOrThis(newNode) - } - override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) - override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) - override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) - } - - def iterator: 
Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapKeyValueTupleIterator[K, V](rootNode) - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else new MapKeyIterator[K, V](rootNode) - } - override def valuesIterator: Iterator[V] = { - if (isEmpty) Iterator.empty - else new MapValueIterator[K, V](rootNode) - } - - protected[immutable] def reverseIterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapKeyValueTupleReverseIterator[K, V](rootNode) - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape. - parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i))) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import collection.convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int]) - case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double]) - case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i))) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import collection.convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int]) - case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long]) - case StepperShape.DoubleShape => 
DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double]) - case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i))) - } - s.asInstanceOf[S with EfficientSplit] - } - - override final def contains(key: K): Boolean = { - val keyUnimprovedHash = key.## - val keyHash = improve(keyUnimprovedHash) - rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0) - } - - override def apply(key: K): V = { - val keyUnimprovedHash = key.## - val keyHash = improve(keyUnimprovedHash) - rootNode.apply(key, keyUnimprovedHash, keyHash, 0) - } - - def get(key: K): Option[V] = { - val keyUnimprovedHash = key.## - val keyHash = improve(keyUnimprovedHash) - rootNode.get(key, keyUnimprovedHash, keyHash, 0) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val keyUnimprovedHash = key.## - val keyHash = improve(keyUnimprovedHash) - rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default) - } - - @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] = - if (newRootNode eq rootNode) this else new HashMap(newRootNode) - - def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = { - val keyUnimprovedHash = key.## - newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true)) - } - - // preemptively overridden in anticipation of performance optimizations - override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] = - super.updatedWith[V1](key)(remappingFunction) - - def removed(key: K): HashMap[K, V] = { - val keyUnimprovedHash = key.## - newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) - } - - override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match { - case hm: HashMap[K, V1] => - if (isEmpty) hm - else { - val newNode = 
rootNode.concat(hm.rootNode, 0) - if (newNode eq hm.rootNode) hm - else newHashMapOrThis(rootNode.concat(hm.rootNode, 0)) - } - case hm: mutable.HashMap[K @unchecked, V @unchecked] => - val iter = hm.nodeIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) - } - return new HashMap(current) - } - } - this - case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => - val iter = lhm.entryIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) - } - return new HashMap(current) - } - } - this - case _ => - class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { - var changed = false - var shallowlyMutableNodeMap: Int = 0 - var current: BitmapIndexedMapNode[K, V1] = rootNode - def apply(kv: (K, V1)) = apply(kv._1, kv._2) - def apply(key: K, value: V1): Unit = { - val originalHash = key.## - val improved = 
improve(originalHash) - if (!changed) { - current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) - if (current ne rootNode) { - // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that - // the first changed key ended up in a subnode beneath root, we mark that root right away as being - // shallowly mutable. - // - // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with - // certainty that it either caused a new subnode to be created underneath `current`, in which case we should - // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is - // done by including its bit position in the shallowlyMutableNodeMap anyways. - changed = true - shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - } - } else { - shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) - } - } - } - that match { - case thatMap: Map[K, V1] => - if (thatMap.isEmpty) this - else { - val accum = new accum - thatMap.foreachEntry(accum) - newHashMapOrThis(accum.current) - } - case _ => - val it = that.iterator - if (it.isEmpty) this - else { - val accum = new accum - it.foreach(accum) - newHashMapOrThis(accum.current) - } - } - } - - override def tail: HashMap[K, V] = this - head._1 - - override def init: HashMap[K, V] = this - last._1 - - override def head: (K, V) = iterator.next() - - override def last: (K, V) = reverseIterator.next() - - override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) - - override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) - - /** Applies a function to each key, value, and **original** hash value in this Map */ - @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) - - override def equals(that: Any): Boolean = - 
that match { - case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) - case _ => super.equals(that) - } - - override def hashCode(): Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be - // immutable. - val hashIterator = new MapKeyValueTupleHashIterator(rootNode) - val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) - // assert(hash == super.hashCode()) - hash - } - } - - override protected[this] def className = "HashMap" - - /** Merges this HashMap with an other HashMap by combining all key-value pairs of both maps, and delegating to a merge - * function to resolve any key collisions between the two HashMaps. - * - * @example {{{ - * val left = HashMap(1 -> 1, 2 -> 1) - * val right = HashMap(2 -> 2, 3 -> 2) - * - * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } - * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) - * - * }}} - * - * @param that the HashMap to merge this HashMap with - * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then - * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to - * `that.concat(this)` - * - * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `merge`, or - * found in `this` or `that`, it is not defined which value will be chosen. 
For example: - * - * Colliding multiple results of merging: - * {{{ - * // key `3` collides between a result of merging keys `1` and `2` - * val left = HashMap(1 -> 1, 2 -> 2) - * val right = HashMap(1 -> 1, 2 -> 2) - * - * val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 } - * // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1) - * }}} - * Colliding results of merging with other keys: - * {{{ - * // key `2` collides between a result of merging `1`, and existing key `2` - * val left = HashMap(1 -> 1, 2 -> 1) - * val right = HashMap(1 -> 2) - * - * val merged = left.merged(right)((_,_) => 2 -> 3) - * // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3) - * }}} - * - */ - def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] = - if (mergef == null) { - that ++ this - } else { - if (isEmpty) that - else if (that.isEmpty) this - else if (size == 1) { - val payload@(k, v) = rootNode.getPayload(0) - val originalHash = rootNode.getHash(0) - val improved = improve(originalHash) - - if (that.rootNode.containsKey(k, originalHash, improved, 0)) { - val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0) - val (mergedK, mergedV) = mergef(payload, thatPayload) - val mergedOriginalHash = mergedK.## - val mergedImprovedHash = improve(mergedOriginalHash) - new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) - } else { - new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true)) - } - } else if (that.size == 0) { - val thatPayload@(k, v) = rootNode.getPayload(0) - val thatOriginalHash = rootNode.getHash(0) - val thatImproved = improve(thatOriginalHash) - - if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) { - val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0) - val (mergedK, mergedV) 
= mergef(payload, thatPayload) - val mergedOriginalHash = mergedK.## - val mergedImprovedHash = improve(mergedOriginalHash) - new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) - } else { - new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true)) - } - } else { - val builder = new HashMapBuilder[K, V1] - rootNode.mergeInto(that.rootNode, builder, 0)(mergef) - builder.result() - } - } - - override def transform[W](f: (K, V) => W): HashMap[K, W] = - newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]] - - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = { - val newRootNode = rootNode.filterImpl(pred, isFlipped) - if (newRootNode eq rootNode) this - else if (newRootNode.size == 0) HashMap.empty - else new HashMap(newRootNode) - } - - override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = { - if (isEmpty) { - this - } else { - keys match { - case hashSet: HashSet[K] => - if (hashSet.isEmpty) { - this - } else { - // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree - // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])` - val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode) - if (newRootNode eq rootNode) this - else if (newRootNode.size <= 0) HashMap.empty - else new HashMap(newRootNode) - } - case hashSet: collection.mutable.HashSet[K] => - if (hashSet.isEmpty) { - this - } else { - val iter = hashSet.nodeIterator - var curr = rootNode - - while (iter.hasNext) { - val next = iter.next() - val originalHash = hashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - curr = curr.removed(next.key, originalHash, improved, 0) - if (curr.size == 0) { - return HashMap.empty - } - } - newHashMapOrThis(curr) - } - case lhashSet: 
collection.mutable.LinkedHashSet[K] => - if (lhashSet.isEmpty) { - this - } else { - val iter = lhashSet.entryIterator - var curr = rootNode - - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - curr = curr.removed(next.key, originalHash, improved, 0) - if (curr.size == 0) { - return HashMap.empty - } - } - newHashMapOrThis(curr) - } - case _ => - val iter = keys.iterator - var curr = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = next.## - val improved = improve(originalHash) - curr = curr.removed(next, originalHash, improved, 0) - if (curr.size == 0) { - return HashMap.empty - } - } - newHashMapOrThis(curr) - } - } - } - - override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two, - // based on the result of applying `p` to its elements and subnodes. - super.partition(p) - } - - override def take(n: Int): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including - // those nodes in the resulting trie, until `n` total elements have been included. - super.take(n) - } - - override def takeRight(n: Int): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. 
- // - // In particular, `take` could be optimized to construct a new trie structure by visiting each node in reverse, and - // and including those nodes in the resulting trie, until `n` total elements have been included. - super.takeRight(n) - } - - override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and - // including those nodes in the resulting trie, until `p` returns `false` - super.takeWhile(p) - } - - override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and - // dropping those nodes in the resulting trie, until `p` returns `true` - super.dropWhile(p) - } - - override def dropRight(n: Int): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse - // order, and dropping all nodes until `n` elements have been dropped - super.dropRight(n) - } - - override def drop(n: Int): HashMap[K, V] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. 
- // - // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, and - // dropping all nodes until `n` elements have been dropped - super.drop(n) - } - - override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - // - // In particular, `scan` could be optimized to construct a new trie structure by visiting each node, and - // keeping each node and element until `p` returns false, then including the remaining nodes in the second result. - // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality - // checks. - super.span(p) - } - -} - -private[immutable] object MapNode { - - private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0) - - def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]] - - final val TupleLength = 2 - -} - - -private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] { - def apply(key: K, originalHash: Int, hash: Int, shift: Int): V - - def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] - - def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 - - def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean - - /** Returns a MapNode with the passed key-value assignment added - * - * @param key the key to add to the MapNode - * @param value the value to associate with `key` - * @param originalHash the original hash of `key` - * @param hash the improved hash of `key` - * @param shift the shift of the node (distanceFromRoot * BitPartitionSize) - * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value - * argument. 
- * if false, then the key will be inserted if not already present, however if the key is present - * then the passed value will not replace the current value. That is, if `false`, then this - * method has `update if not exists` semantics. - */ - def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] - - def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] - - def hasNodes: Boolean - - def nodeArity: Int - - def getNode(index: Int): MapNode[K, V] - - def hasPayload: Boolean - - def payloadArity: Int - - def getKey(index: Int): K - - def getValue(index: Int): V - - def getPayload(index: Int): (K, V) - - def size: Int - - def foreach[U](f: ((K, V)) => U): Unit - - def foreachEntry[U](f: (K, V) => U): Unit - - def foreachWithHash(f: (K, V, Int) => Unit): Unit - - def transform[W](f: (K, V) => W): MapNode[K, W] - - def copy(): MapNode[K, V] - - def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] - - def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] - - /** Merges this node with that node, adding each resulting tuple to `builder` - * - * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` - * - * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, - * as `this` is, within the left tree - */ - def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit - - /** Returns the exact (equal by reference) key, and value, associated to a given key. 
- * If the key is not bound to a value, then an exception is thrown - */ - def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) - - /** Adds all key-value pairs to a builder */ - def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit -} - -private final class BitmapIndexedMapNode[K, +V]( - var dataMap: Int, - var nodeMap: Int, - var content: Array[Any], - var originalHashes: Array[Int], - var size: Int, - var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { - - releaseFence() - - import MapNode._ - import Node._ - - /* - assert(checkInvariantContentIsWellTyped()) - assert(checkInvariantSubNodesAreCompacted()) - - private final def checkInvariantSubNodesAreCompacted(): Boolean = - new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity - - private final def checkInvariantContentIsWellTyped(): Boolean = { - val predicate1 = TupleLength * payloadArity + nodeArity == content.length - - val predicate2 = Range(0, TupleLength * payloadArity) - .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) - - val predicate3 = Range(TupleLength * payloadArity, content.length) - .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) - - predicate1 && predicate2 && predicate3 - } - */ - - def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] - def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] - - def getPayload(index: Int) = Tuple2( - content(TupleLength * index).asInstanceOf[K], - content(TupleLength * index + 1).asInstanceOf[V]) - - override def getHash(index: Int): Int = originalHashes(index) - - def getNode(index: Int): MapNode[K, V] = - content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] - - def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - if (key == getKey(index)) getValue(index) else 
throw new NoSuchElementException(s"key not found: $key") - } else if ((nodeMap & bitpos) != 0) { - getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - throw new NoSuchElementException(s"key not found: $key") - } - } - - def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = this.getKey(index) - if (key == key0) Some(this.getValue(index)) else None - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - None - } - } - - override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { - val mask = maskFrom(hash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val payload = getPayload(index) - if (key == payload._1) payload else throw new NoSuchElementException - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) - } else { - throw new NoSuchElementException - } - } - - def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = this.getKey(index) - if (key == key0) getValue(index) else f - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) - } else { - f - } - } - - override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { - val mask = 
maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) - (originalHashes(index) == originalHash) && key == getKey(index) - } else if ((nodeMap & bitpos) != 0) { - getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) - } else { - false - } - } - - - def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = getKey(index) - val key0UnimprovedHash = getHash(index) - if (key0UnimprovedHash == originalHash && key0 == key) { - if (replaceValue) { - val value0 = this.getValue(index) - if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) - this - else copyAndSetValue(bitpos, key, value) - } else this - } else { - val value0 = this.getValue(index) - val key0Hash = improve(key0UnimprovedHash) - val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - - copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue) - - if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) - } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) - } - - /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately - * 
descendant child nodes (only one level beneath `this`) - * - * The caller should pass a bitmap of child nodes of this node, which this method may mutate. - * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will - * be shallowly mutated (its children will not be mutated). - * - * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then - * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. - * - * @param key the key to update - * @param value the value to set `key` to - * @param originalHash key.## - * @param keyHash the improved hash - * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated - * during the call to this method - * - * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be - * available for mutations in subsequent calls. 
- */ - def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = getKey(index) - val key0UnimprovedHash = getHash(index) - if (key0UnimprovedHash == originalHash && key0 == key) { - val value0 = this.getValue(index) - if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - content(idx + 1) = value - } - shallowlyMutableNodeMap - } else { - val value0 = this.getValue(index) - val key0Hash = improve(key0UnimprovedHash) - - val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) - shallowlyMutableNodeMap | bitpos - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeSize = subNode.size - val subNodeHashCode = subNode.cachedJavaKeySetHashCode - - var returnMutableNodeMap = shallowlyMutableNodeMap - - val subNodeNew: MapNode[K, V1] = subNode match { - case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => - subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) - subNodeBm - case _ => - val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) - if (result ne subNode) { - returnMutableNodeMap |= bitpos - } - result - } - - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size = this.size - subNodeSize + subNodeNew.size - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - 
subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode - returnMutableNodeMap - } else { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - dst(idx + 1) = value - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - this.dataMap |= bitpos - this.content = dst - this.originalHashes = insertElement(originalHashes, dataIx, originalHash) - this.size += 1 - this.cachedJavaKeySetHashCode += keyHash - shallowlyMutableNodeMap - } - } - - def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = { - val mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val key0 = this.getKey(index) - - if (key0 == key) { - if (this.payloadArity == 2 && this.nodeArity == 0) { - /* - * Create new node with remaining pair. The new node will a) either become the new root - * returned, or b) unwrapped and inlined during returning. 
- */ - val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0)) - if (index == 0) - new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1))) - else - new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0))) - } else copyAndRemoveValue(bitpos, keyHash) - } else this - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - - val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize) - // assert(subNodeNew.size != 0, "Sub-node must have at least one element.") - - if (subNodeNew eq subNode) return this - - // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided - // in Vector#length - val subNodeNewSize = subNodeNew.size - - if (subNodeNewSize == 1) { - if (this.size == subNode.size) { - // subNode is the only child (no other data or node children of `this` exist) - // escalate (singleton or empty) result - subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]] - } else { - // inline value (move to front) - copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew) - } - } else if (subNodeNewSize > 1) { - // modify current node (set replacement node) - copyAndSetNode(bitpos, subNode, subNodeNew) - } else this - } else this - } - - def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = { - // assert(key0 != key1) - - if (shift >= HashCodeLength) { - new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1))) - } else { - val mask0 = maskFrom(keyHash0, shift) - val mask1 = maskFrom(keyHash1, shift) - val newCachedHash = keyHash0 + keyHash1 - - if (mask0 != mask1) { - // unique prefixes, payload 
fits on same level - val dataMap = bitposFrom(mask0) | bitposFrom(mask1) - - if (mask0 < mask1) { - new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash) - } else { - new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash) - } - } else { - // identical prefixes, payload must be disambiguated deeper in the trie - val nodeMap = bitposFrom(mask0) - val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize) - new BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) - } - } - } - - def hasNodes: Boolean = nodeMap != 0 - - def nodeArity: Int = bitCount(nodeMap) - - def hasPayload: Boolean = dataMap != 0 - - def payloadArity: Int = bitCount(dataMap) - - def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) - - def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) - - def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - //dst(idx) = newKey - dst(idx + 1) = newValue - new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) - } - - def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val idx = this.content.length - 1 - this.nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - dst(idx) = newNode - new BitmapIndexedMapNode[K, V1]( - dataMap, - nodeMap, 
- dst, - originalHashes, - size - oldNode.size + newNode.size, - cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode - ) - } - - def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - dst(idx + 1) = value - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(originalHashes, dataIx, originalHash) - - new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) - } - - def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length - TupleLength) - - // copy 'src' and remove 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) - } - - /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
- * - * @param bitpos the bit position of the data to migrate to node - * @param keyHash the improved hash of the key currently at `bitpos` - * @param node the node to place at `bitpos` beneath `this` - */ - def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length - TupleLength + 1) - - // copy 'src' and remove 2 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld <= idxNew) - arraycopy(src, 0, dst, 0, idxOld) - arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) - dst(idxNew) = node - arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - this.dataMap = dataMap ^ bitpos - this.nodeMap = nodeMap | bitpos - this.content = dst - this.originalHashes = dstHashes - this.size = size - 1 + node.size - this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode - this - } - - def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length - TupleLength + 1) - - // copy 'src' and remove 2 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld <= idxNew) - arraycopy(src, 0, dst, 0, idxOld) - arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) - dst(idxNew) = node - arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - 
- new BitmapIndexedMapNode[K, V1]( - dataMap = dataMap ^ bitpos, - nodeMap = nodeMap | bitpos, - content = dst, - originalHashes = dstHashes, - size = size - 1 + node.size, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode - ) - } - - def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { - val idxOld = this.content.length - 1 - nodeIndex(bitpos) - val dataIxNew = dataIndex(bitpos) - val idxNew = TupleLength * dataIxNew - - val key = node.getKey(0) - val value = node.getValue(0) - val src = this.content - val dst = new Array[Any](src.length - 1 + TupleLength) - - // copy 'src' and remove 1 element(s) at position 'idxOld' and - // insert 2 element(s) at position 'idxNew' - // assert(idxOld >= idxNew) - arraycopy(src, 0, dst, 0, idxNew) - dst(idxNew) = key - dst(idxNew + 1) = value - arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) - arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) - val hash = node.getHash(0) - val dstHashes = insertElement(originalHashes, dataIxNew, hash) - new BitmapIndexedMapNode[K, V1]( - dataMap = dataMap | bitpos, - nodeMap = nodeMap ^ bitpos, - content = dst, - originalHashes = dstHashes, - size = size - oldNode.size + 1, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode - ) - } - - override def foreach[U](f: ((K, V)) => U): Unit = { - val iN = payloadArity // arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getPayload(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreach(f) - j += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val iN = payloadArity // arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getKey(i), getValue(i)) - i += 1 - } 
- - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachEntry(f) - j += 1 - } - } - - override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { - var i = 0 - val iN = payloadArity // arity doesn't change during this operation - while (i < iN) { - f(getKey(i), getValue(i), getHash(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachWithHash(f) - j += 1 - } - } - override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { - var i = 0 - val iN = payloadArity - val jN = nodeArity - while (i < iN) { - builder.addOne(getKey(i), getValue(i), getHash(i)) - i += 1 - } - - var j = 0 - while (j < jN) { - getNode(j).buildTo(builder) - j += 1 - } - } - - override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { - var newContent: Array[Any] = null - val iN = payloadArity // arity doesn't change during this operation - val jN = nodeArity // arity doesn't change during this operation - val newContentLength = content.length - var i = 0 - while (i < iN) { - val key = getKey(i) - val value = getValue(i) - val newValue = f(key, value) - if (newContent eq null) { - if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { - newContent = content.clone() - newContent(TupleLength * i + 1) = newValue - } - } else { - newContent(TupleLength * i + 1) = newValue - } - i += 1 - } - - var j = 0 - while (j < jN) { - val node = getNode(j) - val newNode = node.transform(f) - if (newContent eq null) { - if (newNode ne node) { - newContent = content.clone() - newContent(newContentLength - j - 1) = newNode - } - } else - newContent(newContentLength - j - 1) = newNode - j += 1 - } - if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] - else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) - } - - override def mergeInto[V1 >: V](that: 
MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case bm: BitmapIndexedMapNode[K, V] @unchecked => - if (size == 0) { - that.buildTo(builder) - return - } else if (bm.size == 0) { - buildTo(builder) - return - } - - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - val minIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - { - var index = minIndex - var leftIdx = 0 - var rightIdx = 0 - - while (index < maxIndex) { - val bitpos = bitposFrom(index) - - if ((bitpos & dataMap) != 0) { - val leftKey = getKey(leftIdx) - val leftValue = getValue(leftIdx) - val leftOriginalHash = getHash(leftIdx) - if ((bitpos & bm.dataMap) != 0) { - // left data and right data - val rightKey = bm.getKey(rightIdx) - val rightValue = bm.getValue(rightIdx) - val rightOriginalHash = bm.getHash(rightIdx) - if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { - builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) - } else { - builder.addOne(leftKey, leftValue, leftOriginalHash) - builder.addOne(rightKey, rightValue, rightOriginalHash) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - // left data and right node - val subNode = bm.getNode(bm.nodeIndex(bitpos)) - val leftImprovedHash = improve(leftOriginalHash) - val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) - if (removed eq subNode) { - // no overlap in leftData and rightNode, just build both children to builder - subNode.buildTo(builder) - builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) - } else { - // there is collision, so special treatment for that key - removed.buildTo(builder) - builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) - } - } else { - // left data and nothing 
on right - builder.addOne(leftKey, leftValue, leftOriginalHash) - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - // left node and right data - val rightKey = bm.getKey(rightIdx) - val rightValue = bm.getValue(rightIdx) - val rightOriginalHash = bm.getHash(rightIdx) - val rightImprovedHash = improve(rightOriginalHash) - - val subNode = getNode(nodeIndex(bitpos)) - val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) - if (removed eq subNode) { - // no overlap in leftNode and rightData, just build both children to builder - subNode.buildTo(builder) - builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) - } else { - // there is collision, so special treatment for that key - removed.buildTo(builder) - builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) - } - rightIdx += 1 - - } else if ((bitpos & bm.nodeMap) != 0) { - // left node and right node - getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) - } else { - // left node and nothing on right - getNode(nodeIndex(bitpos)).buildTo(builder) - } - } else if ((bitpos & bm.dataMap) != 0) { - // nothing on left, right data - val dataIndex = bm.dataIndex(bitpos) - builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) - rightIdx += 1 - - } else if ((bitpos & bm.nodeMap) != 0) { - // nothing on left, right node - bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) - } - - index += 1 - } - } - case _: HashCollisionMapNode[_, _] => - throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") - } - - override def equals(that: Any): Boolean = - that match { - case node: BitmapIndexedMapNode[_, _] => - (this eq node) || - (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && - (this.nodeMap == 
node.nodeMap) && - (this.dataMap == node.dataMap) && - (this.size == node.size) && - java.util.Arrays.equals(this.originalHashes, node.originalHashes) && - deepContentEquality(this.content, node.content, content.length) - case _ => false - } - - @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { - if (a1 eq a2) - true - else { - var isEqual = true - var i = 0 - - while (isEqual && i < length) { - isEqual = a1(i) == a2(i) - i += 1 - } - - isEqual - } - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { - case bm: BitmapIndexedMapNode[K, V] @unchecked => - if (size == 0) return bm - else if (bm.size == 0 || (bm eq this)) return this - else if (bm.size == 1) { - val originalHash = bm.getHash(0) - return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) - } - // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing - // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the - // currently-being-computed result, and `bm` - var anyChangesMadeSoFar = false - - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - // minimumIndex is inclusive -- it is the first index for which there is data or nodes - val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) - // maximumIndex is inclusive -- it is the last index for which there is data or nodes - // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound - // of int bitposition representation - val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) - - var leftNodeRightNode = 0 - var leftDataRightNode = 0 - var 
leftNodeRightData = 0 - var leftDataOnly = 0 - var rightDataOnly = 0 - var leftNodeOnly = 0 - var rightNodeOnly = 0 - var leftDataRightDataMigrateToNode = 0 - var leftDataRightDataRightOverwrites = 0 - - var dataToNodeMigrationTargets = 0 - - { - var bitpos = minimumBitPos - var leftIdx = 0 - var rightIdx = 0 - var finished = false - - while (!finished) { - - if ((bitpos & dataMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - val leftOriginalHash = getHash(leftIdx) - if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { - leftDataRightDataRightOverwrites |= bitpos - } else { - leftDataRightDataMigrateToNode |= bitpos - dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftDataRightNode |= bitpos - } else { - leftDataOnly |= bitpos - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - leftNodeRightData |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftNodeRightNode |= bitpos - } else { - leftNodeOnly |= bitpos - } - } else if ((bitpos & bm.dataMap) != 0) { - rightDataOnly |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - rightNodeOnly |= bitpos - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - - val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites - - val newNodeMap = - leftNodeRightNode | - leftDataRightNode | - leftNodeRightData | - leftNodeOnly | - rightNodeOnly | - dataToNodeMigrationTargets - - - if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { - // nothing from `this` will make it into the result -- return early - return bm - } - - val newDataSize = bitCount(newDataMap) - val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) - - val newContent = new 
Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - var newSize = 0 - var newCachedHashCode = 0 - - { - var leftDataIdx = 0 - var rightDataIdx = 0 - var leftNodeIdx = 0 - var rightNodeIdx = 0 - - val nextShift = shift + Node.BitPartitionSize - - var compressedDataIdx = 0 - var compressedNodeIdx = 0 - - var bitpos = minimumBitPos - var finished = false - - while (!finished) { - - if ((bitpos & leftNodeRightNode) != 0) { - val rightNode = bm.getNode(rightNodeIdx) - val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) - if (rightNode ne newNode) { - anyChangesMadeSoFar = true - } - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataRightNode) != 0) { - val newNode = { - val n = bm.getNode(rightNodeIdx) - val leftKey = getKey(leftDataIdx) - val leftValue = getValue(leftDataIdx) - val leftOriginalHash = getHash(leftDataIdx) - val leftImproved = improve(leftOriginalHash) - - val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) - - if (updated ne n) { - anyChangesMadeSoFar = true - } - - updated - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } - else if ((bitpos & leftNodeRightData) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val rightOriginalHash = bm.getHash(rightDataIdx) - getNode(leftNodeIdx).updated( - key = bm.getKey(rightDataIdx), - value = bm.getValue(rightDataIdx), - originalHash = bm.getHash(rightDataIdx), - hash = improve(rightOriginalHash), - shift = nextShift, - replaceValue = true - ) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - 
rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataOnly) != 0) { - anyChangesMadeSoFar = true - val originalHash = originalHashes(leftDataIdx) - newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] - newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - leftDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & rightDataOnly) != 0) { - val originalHash = bm.originalHashes(rightDataIdx) - newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] - newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & leftNodeOnly) != 0) { - anyChangesMadeSoFar = true - val newNode = getNode(leftNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & rightNodeOnly) != 0) { - val newNode = bm.getNode(rightNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val leftOriginalHash = getHash(leftDataIdx) - val rightOriginalHash = bm.getHash(rightDataIdx) - - bm.mergeTwoKeyValPairs( - getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), - bm.getKey(rightDataIdx), 
bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), - nextShift - ) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftDataIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) { - val originalHash = bm.originalHashes(rightDataIdx) - newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] - newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - leftDataIdx += 1 - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - if (anyChangesMadeSoFar) - new BitmapIndexedMapNode( - dataMap = newDataMap, - nodeMap = newNodeMap, - content = newContent, - originalHashes = newOriginalHashes, - size = newSize, - cachedJavaKeySetHashCode = newCachedHashCode - ) - else bm - - case _ => - // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") - } - - override def copy(): BitmapIndexedMapNode[K, V] = { - val contentClone = content.clone() - val contentLength = contentClone.length - var i = bitCount(dataMap) * TupleLength - while (i < contentLength) { - contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy() - i += 1 - } - new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) - } - - override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = { - if (size == 0) this - else if (size == 1) { - if (pred(getPayload(0)) != flipped) this 
else MapNode.empty - } else if (nodeMap == 0) { - // Performance optimization for nodes of depth 1: - // - // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler - // approach: - // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter - // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations - // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays - // - // note: - // * this optimization significantly improves performance of not only small trees, but also larger trees, since - // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as - // descendants - // - val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) - - var newDataMap = 0 - var newCachedHashCode = 0 - var dataIndex = 0 - - var i = minimumIndex - - while(i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } - - i += 1 - } - - if (newDataMap == 0) { - MapNode.empty - } else if (newDataMap == dataMap) { - this - } else { - val newSize = Integer.bitCount(newDataMap) - val newContent = new Array[Any](newSize * TupleLength) - val newOriginalHashCodes = new Array[Int](newSize) - val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) - - var j = Integer.numberOfTrailingZeros(newDataMap) - - var newDataIndex = 0 - - - while (j < newMaximumIndex) { - val bitpos = bitposFrom(j) - if ((bitpos & newDataMap) != 0) { - val oldIndex = 
indexFrom(dataMap, bitpos) - newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) - newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) - newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) - newDataIndex += 1 - } - j += 1 - } - - new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) - } - - - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null - - // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[MapNode[K, V]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val 
oldSubNode = getNode(nodeIndex) - val newSubNode = oldSubNode.filterImpl(pred, flipped) - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue() - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - - if (newSize == 0) { - MapNode.empty - } else if (newSize == size) { - this - } else { - val newDataSize = bitCount(newDataMap) - val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - - val newAllMap = newDataMap | newNodeMap - val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) - - // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will - // not be incremented properly. 
Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) - var i = minimumIndex - - var oldDataIndex = 0 - var oldNodeIndex = 0 - - var newDataIndex = 0 - var newNodeIndex = 0 - - while (i < maxIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & oldDataPassThrough) != 0) { - newContent(newDataIndex * TupleLength) = getKey(oldDataIndex) - newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex) - newOriginalHashes(newDataIndex) = getHash(oldDataIndex) - newDataIndex += 1 - oldDataIndex += 1 - } else if ((bitpos & nodesToPassThroughMap) != 0) { - newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { - // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null - val node = nodesToMigrateToData.dequeue() - newContent(TupleLength * newDataIndex) = node.getKey(0) - newContent(TupleLength * newDataIndex + 1) = node.getValue(0) - newOriginalHashes(newDataIndex) = node.getHash(0) - newDataIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & mapOfNewNodes) != 0) { - newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & dataMap) != 0) { - oldDataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - oldNodeIndex += 1 - } - - i += 1 - } - - new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) - } - } - } -} - -private final class HashCollisionMapNode[K, +V ]( - val originalHash: Int, - val hash: Int, - var content: Vector[(K, V @uV) @uncheckedCaptures] - ) extends MapNode[K, V] { - - import Node._ - - require(content.length >= 2) - - releaseFence() - - private[immutable] def indexOf(key: Any): Int = { - val iter = content.iterator - var i = 0 - while (iter.hasNext) { - if (iter.next()._1 == key) return i - i += 1 - } - -1 - } - - def size: Int 
= content.length - - def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException) - - def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] = - if (this.hash == hash) { - val index = indexOf(key) - if (index >= 0) Some(content(index)._2) else None - } else None - - override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { - val index = indexOf(key) - if (index >= 0) content(index) else throw new NoSuchElementException - } - - def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = { - if (this.hash == hash) { - indexOf(key) match { - case -1 => f - case other => content(other)._2 - } - } else f - } - - override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean = - this.hash == hash && indexOf(key) >= 0 - - def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean = - this.hash == hash && { - val index = indexOf(key) - index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) - } - - def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { - val index = indexOf(key) - if (index >= 0) { - if (replaceValue) { - if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { - this - } else { - new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) - } - } else { - this - } - } else { - new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) - } - } - - def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { - if (!this.containsKey(key, originalHash, hash, shift)) { - this - } else { - val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) - // assert(updatedContent.size == content.size - 1) - - updatedContent.size match { - case 1 
=> - val (k, v) = updatedContent(0) - new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) - case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) - } - } - } - - def hasNodes: Boolean = false - - def nodeArity: Int = 0 - - def getNode(index: Int): MapNode[K, V] = - throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") - - def hasPayload: Boolean = true - - def payloadArity: Int = content.length - - def getKey(index: Int): K = getPayload(index)._1 - def getValue(index: Int): V = getPayload(index)._2 - - def getPayload(index: Int): (K, V) = content(index) - - override def getHash(index: Int): Int = originalHash - - def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) - - def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} - - override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val next = iter.next() - f(next._1, next._2, originalHash) - } - } - - override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { - val newContent = Vector.newBuilder[(K, W)] - val contentIter = content.iterator - // true if any values have been transformed to a different value via `f` - var anyChanges = false - while(contentIter.hasNext) { - val (k, v) = contentIter.next() - val newValue = f(k, v) - newContent.addOne((k, newValue)) - anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) - } - if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) - else this.asInstanceOf[HashCollisionMapNode[K, W]] - } - - override def equals(that: Any): Boolean = - that match { - case node: HashCollisionMapNode[_, _] => - (this eq node) || - (this.hash == node.hash) && - (this.content.length == node.content.length) && { - val iter = content.iterator - while (iter.hasNext) { - val (key, value) = iter.next() - val index = 
node.indexOf(key) - if (index < 0 || value != node.content(index)._2) { - return false - } - } - true - } - case _ => false - } - - override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { - case hc: HashCollisionMapNode[K, V1] => - if (hc eq this) { - this - } else { - var newContent: VectorBuilder[(K, V1)] = null - val iter = content.iterator - while (iter.hasNext) { - val nextPayload = iter.next() - if (hc.indexOf(nextPayload._1) < 0) { - if (newContent eq null) { - newContent = new VectorBuilder[(K, V1)]() - newContent.addAll(hc.content) - } - newContent.addOne(nextPayload) - } - } - if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) - } - case _: BitmapIndexedMapNode[K, V1] => - // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") - } - - - override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case hc: HashCollisionMapNode[K, V1] => - val iter = content.iterator - val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] - - def rightIndexOf(key: K): Int = { - var i = 0 - while (i < rightArray.length) { - val elem = rightArray(i) - if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i - i += 1 - } - -1 - } - - while (iter.hasNext) { - val nextPayload = iter.next() - val index = rightIndexOf(nextPayload._1) - - if (index == -1) { - builder.addOne(nextPayload) - } else { - val rightPayload = rightArray(index).asInstanceOf[(K, V1)] - rightArray(index) = null - - builder.addOne(mergef(nextPayload, rightPayload)) - } - } - - var i = 0 - while (i < rightArray.length) { - val elem = rightArray(i) - if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) - i += 1 - } - case _: 
BitmapIndexedMapNode[K, V1] => - throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") - - } - - override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val (k, v) = iter.next() - builder.addOne(k, v, originalHash, hash) - } - } - - override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { - val newContent = content.filterImpl(pred, flipped) - val newContentLength = newContent.length - if (newContentLength == 0) { - MapNode.empty - } else if (newContentLength == 1) { - val (k, v) = newContent.head - new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) - } else if (newContentLength == content.length) this - else new HashCollisionMapNode(originalHash, hash, newContent) - } - - override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def cachedJavaKeySetHashCode: Int = size * hash - -} - -private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val key = currentValueNode.getKey(currentValueCursor) - currentValueCursor += 1 - - key - } - -} - -private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val value = currentValueNode.getValue(currentValueCursor) - currentValueCursor += 1 - - value - } -} - -private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { - - def next() = { - if (!hasNext) - throw new 
NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor += 1 - - payload - } - -} - -private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor -= 1 - - payload - } -} - -private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) - extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { - private[this] var hash = 0 - private[this] var value: V @uncheckedCaptures = _ - override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) - def next() = { - if (!hasNext) - throw new NoSuchElementException - - hash = currentValueNode.getHash(currentValueCursor) - value = currentValueNode.getValue(currentValueCursor) - currentValueCursor -= 1 - this - } -} - -/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ -private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { - /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ - def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { - var curr = rootMapNode - while (curr.size > 0 && hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - curr = curr.removed( - key = currentValueNode.getPayload(currentValueCursor), - keyHash = improve(originalHash), - originalHash = originalHash, - shift = 0 - ) - currentValueCursor += 1 - } - curr - } -} - -/** - * $factoryInfo - * - * @define Coll `immutable.HashMap` - * @define coll immutable champ hash map - */ -@SerialVersionUID(3L) -object HashMap extends MapFactory[HashMap] { - - @transient - private final val EmptyMap = new HashMap(MapNode.empty) 
- - def empty[K, V]: HashMap[K, V] = - EmptyMap.asInstanceOf[HashMap[K, V]] - - def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = - source match { - case hs: HashMap[K, V] => hs - case _ => (newBuilder[K, V] ++= source).result() - } - - /** Create a new Builder which can be reused after calling `result()` without an - * intermediate call to `clear()` in order to build multiple related results. - */ - def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] -} - - -/** A Builder for a HashMap. - * $multipleResults - */ -private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { - import MapNode._ - import Node._ - - private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) - - /** The last given out HashMap as a return value of `result()`, if any, otherwise null. - * Indicates that on next add, the elements should be copied to an identical structure, before continuing - * mutations. 
*/ - private var aliased: HashMap[K, V] @uncheckedCaptures = _ - - private def isAliased: Boolean = aliased != null - - /** The root node of the partially build hashmap */ - private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode - - private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = - if (rootNode.size == 0) value - else { - val originalHash = key.## - rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value) - } - - /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ - private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { - if (ix < 0) throw new ArrayIndexOutOfBoundsException - if (ix > as.length) throw new ArrayIndexOutOfBoundsException - val result = new Array[Int](as.length + 1) - arraycopy(as, 0, result, 0, ix) - result(ix) = elem - arraycopy(as, ix, result, ix + 1, as.length - ix) - result - } - - /** Inserts key-value into the bitmapIndexMapNode. Requires that this is a new key-value pair */ - private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V],bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = { - val dataIx = bm.dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = bm.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - dst(idx + 1) = value - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) - - bm.dataMap |= bitpos - bm.content = dst - bm.originalHashes = dstHashes - bm.size += 1 - bm.cachedJavaKeySetHashCode += keyHash - } - - /** Upserts a key/value pair into mapNode, mutably */ - private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = { - mapNode match { - case bm: BitmapIndexedMapNode[K, V] => - val 
mask = maskFrom(keyHash, shift) - val bitpos = bitposFrom(mask) - if ((bm.dataMap & bitpos) != 0) { - val index = indexFrom(bm.dataMap, mask, bitpos) - val key0 = bm.getKey(index) - val key0UnimprovedHash = bm.getHash(index) - - if (key0UnimprovedHash == originalHash && key0 == key) { - bm.content(TupleLength * index + 1) = value - } else { - val value0 = bm.getValue(index) - val key0Hash = improve(key0UnimprovedHash) - - val subNodeNew: MapNode[K, V] = - bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) - - bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) - } - - } else if ((bm.nodeMap & bitpos) != 0) { - val index = indexFrom(bm.nodeMap, mask, bitpos) - val subNode = bm.getNode(index) - val beforeSize = subNode.size - val beforeHash = subNode.cachedJavaKeySetHashCode - update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize) - bm.size += subNode.size - beforeSize - bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash - } else { - insertValue(bm, bitpos, key, originalHash, keyHash, value) - } - case hc: HashCollisionMapNode[K, V] => - val index = hc.indexOf(key) - if (index < 0) { - hc.content = hc.content.appended((key, value)) - } else { - hc.content = hc.content.updated(index, (key, value)) - } - } - } - - /** If currently referencing aliased structure, copy elements to new mutable structure */ - private[this] def ensureUnaliased() = { - if (isAliased) copyElems() - aliased = null - } - - /** Copy elements to new mutable structure */ - private[this] def copyElems(): Unit = { - rootNode = rootNode.copy() - } - - override def result(): HashMap[K, V] = - if (rootNode.size == 0) { - HashMap.empty - } else if (aliased != null) { - aliased - } else { - aliased = new HashMap(rootNode) - releaseFence() - aliased - } - - override def addOne(elem: (K, V)): this.type = { - ensureUnaliased() - val h = elem._1.## - val im = improve(h) - 
update(rootNode, elem._1, elem._2, h, im, 0) - this - } - - def addOne(key: K, value: V): this.type = { - ensureUnaliased() - val originalHash = key.## - update(rootNode, key, value, originalHash, improve(originalHash), 0) - this - } - def addOne(key: K, value: V, originalHash: Int): this.type = { - ensureUnaliased() - update(rootNode, key, value, originalHash, improve(originalHash), 0) - this - } - def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { - ensureUnaliased() - update(rootNode, key, value, originalHash, hash, 0) - this - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - ensureUnaliased() - xs match { - case hm: HashMap[K, V] => - new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { - while(hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - update( - mapNode = rootNode, - key = currentValueNode.getKey(currentValueCursor), - value = currentValueNode.getValue(currentValueCursor), - originalHash = originalHash, - keyHash = improve(originalHash), - shift = 0 - ) - currentValueCursor += 1 - } - }.asInstanceOf // !!! 
cc gets confused with representation of capture sets in invariant position - case hm: collection.mutable.HashMap[K, V] => - val iter = hm.nodeIterator - while (iter.hasNext) { - val next = iter.next() - val originalHash = hm.unimproveHash(next.hash) - val hash = improve(originalHash) - update(rootNode, next.key, next.value, originalHash, hash, 0) - } - case lhm: collection.mutable.LinkedHashMap[K, V] => - val iter = lhm.entryIterator - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhm.unimproveHash(next.hash) - val hash = improve(originalHash) - update(rootNode, next.key, next.value, originalHash, hash, 0) - } - case thatMap: Map[K, V] => - thatMap.foreachEntry((key, value) => addOne(key, value)) - case other => - val it = other.iterator - while(it.hasNext) addOne(it.next()) - } - - this - } - - override def clear(): Unit = { - aliased = null - if (rootNode.size > 0) { - rootNode = newEmptyRootNode - } - } - - private[collection] def size: Int = rootNode.size - - override def knownSize: Int = rootNode.size -} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala deleted file mode 100644 index 38f394a7005f..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/HashSet.scala +++ /dev/null @@ -1,2125 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import java.lang.Integer.{bitCount, numberOfTrailingZeros} -import java.lang.System.arraycopy - -import scala.collection.Hashing.improve -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder -import scala.runtime.Statics.releaseFence -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. - * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. - * - * @tparam A the type of the elements contained in this hash set. - * @define Coll `immutable.HashSet` - * @define coll immutable champ hash set - */ -final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) - extends AbstractSet[A] - with StrictOptimizedSetOps[A, HashSet, HashSet[A]] - with IterableFactoryDefaults[A, HashSet] - with DefaultSerializable { - - def this() = this(SetNode.empty) - - // This release fence is present because rootNode may have previously been mutated during construction. 
- releaseFence() - - private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = - if (rootNode eq newRootNode) this else new HashSet(newRootNode) - - override def iterableFactory: IterableFactory[HashSet] = HashSet - - override def knownSize: Int = rootNode.size - - override def size: Int = rootNode.size - - override def isEmpty: Boolean = rootNode.size == 0 - - def iterator: Iterator[A] = { - if (isEmpty) Iterator.empty - else new SetIterator[A](rootNode) - } - - protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) - case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) - case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) - } - s.asInstanceOf[S with EfficientSplit] - } - - def contains(element: A): Boolean = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - rootNode.contains(element, elementUnimprovedHash, elementHash, 0) - } - - def incl(element: A): HashSet[A] = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) - newHashSetOrThis(newRootNode) - } - - def excl(element: A): HashSet[A] = { - val elementUnimprovedHash = element.## - val elementHash = improve(elementUnimprovedHash) - val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) - 
newHashSetOrThis(newRootNode) - } - - override def concat(that: IterableOnce[A]): HashSet[A] = - that match { - case hs: HashSet[A] => - if (isEmpty) hs - else { - val newNode = rootNode.concat(hs.rootNode, 0) - if (newNode eq hs.rootNode) hs - else newHashSetOrThis(newNode) - } - case hs: collection.mutable.HashSet[A] => - val iter = hs.nodeIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hs.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, originalHash, improved, 0) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val next = iter.next() - val originalHash = hs.unimproveHash(next.hash) - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - case lhs: collection.mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - var current = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhs.unimproveHash(next.hash) - val improved = improve(originalHash) - current = current.updated(next.key, originalHash, improved, 0) - - if (current ne rootNode) { - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val next = iter.next() - val originalHash = lhs.unimproveHash(next.hash) - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - case _ => - val iter = that.iterator - var current = rootNode - while (iter.hasNext) { - val element = iter.next() - val originalHash = element.## - val improved = improve(originalHash) - current = current.updated(element, originalHash, improved, 0) - - if (current 
ne rootNode) { - // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that - // the first changed key ended up in a subnode beneath root, we mark that root right away as being - // shallowly mutable. - // - // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with - // certainty that it either caused a new subnode to be created underneath `current`, in which case we should - // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is - // done by including its bit position in the shallowlyMutableNodeMap anyways. - var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) - while (iter.hasNext) { - val element = iter.next() - val originalHash = element.## - val improved = improve(originalHash) - shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap) - } - return new HashSet(current) - } - } - this - } - - override def tail: HashSet[A] = this - head - - override def init: HashSet[A] = this - last - - override def head: A = iterator.next() - - override def last: A = reverseIterator.next() - - override def foreach[U](f: A => U): Unit = rootNode.foreach(f) - - /** Applies a function f to each element, and its corresponding **original** hash, in this Set */ - @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f) - - /** Applies a function f to each element, and its corresponding **original** hash, in this Set - * Stops iterating the first time that f returns `false`.*/ - @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f) - - def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match { - case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0) - case _ => super.subsetOf(that) - } - - override def equals(that: 
Any): Boolean = - that match { - case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) - case _ => super.equals(that) - } - - override protected[this] def className = "HashSet" - - override def hashCode(): Int = { - val it = new SetHashIterator(rootNode) - val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed) - //assert(hash == super.hashCode()) - hash - } - - override def diff(that: collection.Set[A]): HashSet[A] = { - if (isEmpty) { - this - } else { - that match { - case hashSet: HashSet[A] => - if (hashSet.isEmpty) this else { - val newRootNode = rootNode.diff(hashSet.rootNode, 0) - if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(rootNode.diff(hashSet.rootNode, 0)) - } - case hashSet: collection.mutable.HashSet[A] => - val iter = hashSet.nodeIterator - var curr = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = hashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - curr = curr.removed(next.key, originalHash, improved, 0) - if (curr ne rootNode) { - if (curr.size == 0) { - return HashSet.empty - } - while (iter.hasNext) { - val next = iter.next() - val originalHash = hashSet.unimproveHash(next.hash) - val improved = improve(originalHash) - - curr.removeWithShallowMutations(next.key, originalHash, improved) - - if (curr.size == 0) { - return HashSet.empty - } - } - return new HashSet(curr) - } - } - this - - case other => - val thatKnownSize = other.knownSize - - if (thatKnownSize == 0) { - this - } else if (thatKnownSize <= size) { - /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so - we're likely to be the faster of the two at that. 
*/ - removedAllWithShallowMutations(other) - } else { - // TODO: Develop more sophisticated heuristic for which branch to take - filterNot(other.contains) - } - } - - } - } - - /** Immutably removes all elements of `that` from this HashSet - * - * Mutation is used internally, but only on root SetNodes which this method itself creates. - * - * That is, this method is safe to call on published sets because it does not mutate `this` - */ - private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { - val iter = that.iterator - var curr = rootNode - while (iter.hasNext) { - val next = iter.next() - val originalHash = next.## - val improved = improve(originalHash) - curr = curr.removed(next, originalHash, improved, 0) - if (curr ne rootNode) { - if (curr.size == 0) { - return HashSet.empty - } - while (iter.hasNext) { - val next = iter.next() - val originalHash = next.## - val improved = improve(originalHash) - - curr.removeWithShallowMutations(next, originalHash, improved) - - if (curr.size == 0) { - return HashSet.empty - } - } - return new HashSet(curr) - } - } - this - } - - override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { - case set: scala.collection.Set[A] => diff(set) - case range: Range if range.length > size => - filter { - case i: Int => !range.contains(i) - case _ => true - } - - case _ => - removedAllWithShallowMutations(that) - } - - override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.partition(p) - } - - override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. 
- super.span(p) - } - - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { - val newRootNode = rootNode.filterImpl(pred, isFlipped) - if (newRootNode eq rootNode) this - else if (newRootNode.size == 0) HashSet.empty - else new HashSet(newRootNode) - } - - override def intersect(that: collection.Set[A]): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.intersect(that) - } - - override def take(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.take(n) - } - - override def takeRight(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.takeRight(n) - } - - override def takeWhile(p: A => Boolean): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.takeWhile(p) - } - - override def drop(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.drop(n) - } - - override def dropRight(n: Int): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. 
- super.dropRight(n) - } - - override def dropWhile(p: A => Boolean): HashSet[A] = { - // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included - // in a minor release without breaking binary compatibility. - super.dropWhile(p) - } -} - -private[immutable] object SetNode { - - private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) - - def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] - - final val TupleLength = 1 - -} - -private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { - - def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean - - def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] - - def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] - - def hasNodes: Boolean - - def nodeArity: Int - - def getNode(index: Int): SetNode[A] - - def hasPayload: Boolean - - def payloadArity: Int - - def getPayload(index: Int): A - - def size: Int - - def foreach[U](f: A => U): Unit - - def subsetOf(that: SetNode[A], shift: Int): Boolean - - def copy(): SetNode[A] - - def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] - - def diff(that: SetNode[A], shift: Int): SetNode[A] - - def concat(that: SetNode[A], shift: Int): SetNode[A] - - def foreachWithHash(f: (A, Int) => Unit): Unit - - def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean -} - -private final class BitmapIndexedSetNode[A]( - var dataMap: Int, - var nodeMap: Int, - var content: Array[Any], - var originalHashes: Array[Int], - var size: Int, - var cachedJavaKeySetHashCode: Int) extends SetNode[A] { - - import Node._ - import SetNode._ - - /* - assert(checkInvariantContentIsWellTyped()) - assert(checkInvariantSubNodesAreCompacted()) - - private final def checkInvariantSubNodesAreCompacted(): Boolean = - new SetIterator[A](this).size - payloadArity >= 2 * nodeArity - - 
private final def checkInvariantContentIsWellTyped(): Boolean = { - val predicate1 = TupleLength * payloadArity + nodeArity == content.length - - val predicate2 = Range(0, TupleLength * payloadArity) - .forall(i => content(i).isInstanceOf[SetNode[_]] == false) - - val predicate3 = Range(TupleLength * payloadArity, content.length) - .forall(i => content(i).isInstanceOf[SetNode[_]] == true) - - predicate1 && predicate2 && predicate3 - } - */ - - def getPayload(index: Int): A = content(index).asInstanceOf[A] - - override def getHash(index: Int): Int = originalHashes(index) - - def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] - - def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - return originalHashes(index) == originalHash && element == this.getPayload(index) - } - - if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) - } - - false - } - - def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = this.getPayload(index) - - if (element0.asInstanceOf[AnyRef] eq element.asInstanceOf[AnyRef]) { - return this - } else { - val element0UnimprovedHash = getHash(index) - val element0Hash = improve(element0UnimprovedHash) - if (originalHash == element0UnimprovedHash && element0 == element) { - return this - } else { - val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) - return 
copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew) - } - } - } - if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - - val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) - if (subNode eq subNodeNew) { - return this - } else { - return copyAndSetNode(bitpos, subNode, subNodeNew) - } - } - - copyAndInsertValue(bitpos, element, originalHash, elementHash) - } - /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately - * descendant child nodes (only one level beneath `this`) - * - * The caller should pass a bitmap of child nodes of this node, which this method may mutate. - * If this method may mutate a child node, then if the updated value is located in that child node, it will - * be shallowly mutated (its children will not be mutated). - * - * If instead this method may not mutate the child node in which the to-be-updated value is located, then - * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. - * - * @param key the key to update - * @param originalHash key.## - * @param keyHash the improved hash - * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated - * during the call to this method - * - * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be - * available for mutations in subsequent calls. 
- */ - def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = getPayload(index) - val element0UnimprovedHash = getHash(index) - if (element0UnimprovedHash == originalHash && element0 == element) { - shallowlyMutableNodeMap - } else { - val element0Hash = improve(element0UnimprovedHash) - val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) - migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) - shallowlyMutableNodeMap | bitpos - } - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - val subNodeSize = subNode.size - val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode - - var returnNodeMap = shallowlyMutableNodeMap - - val subNodeNew: SetNode[A] = subNode match { - case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 => - subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0) - subNodeBm - case _ => - val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) - if (subNodeNew ne subNode) { - returnNodeMap |= bitpos - } - subNodeNew - } - - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size = this.size - subNodeSize + subNodeNew.size - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode - returnNodeMap - } else { - val dataIx = dataIndex(bitpos) - val idx = dataIx - - val src = this.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' 
- arraycopy(src, 0, dst, 0, idx) - dst(idx) = element - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(originalHashes, dataIx, originalHash) - - this.dataMap |= bitpos - this.content = dst - this.originalHashes = dstHashes - this.size += 1 - this.cachedJavaKeySetHashCode += elementHash - shallowlyMutableNodeMap - } - } - - - def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = this.getPayload(index) - - if (element0 == element) { - if (this.payloadArity == 2 && this.nodeArity == 0) { - /* - * Create new node with remaining pair. The new node will a) either become the new root - * returned, or b) unwrapped and inlined during returning. - */ - val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0)) - if (index == 0) - return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1))) - else - return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0))) - } - else return copyAndRemoveValue(bitpos, elementHash) - } else return this - } - - if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - - val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize) - - if (subNodeNew eq subNode) return this - - // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided - // in Vector#length - val subNodeNewSize = subNodeNew.size - - if (subNodeNewSize == 1) { - if (this.size == subNode.size) { - // subNode is the only child (no other data or node children of `this` exist) - // escalate 
(singleton or empty) result - return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]] - } else { - // inline value (move to front) - return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew) - } - } else if (subNodeNewSize > 1) { - // modify current node (set replacement node) - return copyAndSetNode(bitpos, subNode, subNodeNew) - } - } - - this - } - /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new - * node - * - * Should only be called on root nodes, because shift is assumed to be 0 - * - * @param element the element to remove - * @param originalHash the original hash of `element` - * @param elementHash the improved hash of `element` - */ - def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = { - val mask = maskFrom(elementHash, 0) - val bitpos = bitposFrom(mask) - - if ((dataMap & bitpos) != 0) { - val index = indexFrom(dataMap, mask, bitpos) - val element0 = this.getPayload(index) - - if (element0 == element) { - if (this.payloadArity == 2 && this.nodeArity == 0) { - val newDataMap = dataMap ^ bitpos - if (index == 0) { - val newContent = Array[Any](getPayload(1)) - val newOriginalHashes = Array(originalHashes(1)) - val newCachedJavaKeySetHashCode = improve(getHash(1)) - this.content = newContent - this.originalHashes = newOriginalHashes - this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode - } else { - val newContent = Array[Any](getPayload(0)) - val newOriginalHashes = Array(originalHashes(0)) - val newCachedJavaKeySetHashCode = improve(getHash(0)) - this.content = newContent - this.originalHashes = newOriginalHashes - this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode - } - this.dataMap = newDataMap - this.nodeMap = 0 - this.size = 1 - this - } - else { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length - TupleLength) - - arraycopy(src, 
0, dst, 0, idx) - arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) - - val dstHashes = removeElement(originalHashes, dataIx) - - this.dataMap = this.dataMap ^ bitpos - this.content = dst - this.originalHashes = dstHashes - this.size -= 1 - this.cachedJavaKeySetHashCode -= elementHash - this - } - } else this - } else if ((nodeMap & bitpos) != 0) { - val index = indexFrom(nodeMap, mask, bitpos) - val subNode = this.getNode(index) - - val subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] - - if (subNodeNew eq subNode) return this - - if (subNodeNew.size == 1) { - if (this.payloadArity == 0 && this.nodeArity == 1) { - this.dataMap = subNodeNew.dataMap - this.nodeMap = subNodeNew.nodeMap - this.content = subNodeNew.content - this.originalHashes = subNodeNew.originalHashes - this.size = subNodeNew.size - this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode - this - } else { - migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) - this - } - } else { - // size must be > 1 - this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew - this.size -= 1 - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode - this - } - } else this - } - - def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { - // assert(key0 != key1) - - if (shift >= HashCodeLength) { - new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) - } else { - val mask0 = maskFrom(keyHash0, shift) - val mask1 = maskFrom(keyHash1, shift) - - if (mask0 != mask1) { - // unique prefixes, payload fits on same level - val dataMap = bitposFrom(mask0) | bitposFrom(mask1) - val newCachedHashCode = keyHash0 + keyHash1 - - if (mask0 < mask1) { - new 
BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) - } else { - new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) - } - } else { - // identical prefixes, payload must be disambiguated deeper in the trie - val nodeMap = bitposFrom(mask0) - val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) - - new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) - } - } - } - - def hasPayload: Boolean = dataMap != 0 - - def payloadArity: Int = bitCount(dataMap) - - def hasNodes: Boolean = nodeMap != 0 - - def nodeArity: Int = bitCount(nodeMap) - - def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) - - def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) - - def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { - val idx = this.content.length - 1 - this.nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - dst(idx) = newNode - new BitmapIndexedSetNode[A]( - dataMap = dataMap, - nodeMap = nodeMap, - content = dst, - originalHashes = originalHashes, - size = size - oldNode.size + newNode.size, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode - ) - } - - def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length + 1) - - // copy 'src' and insert 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - arraycopy(src, idx, dst, idx + 1, src.length - idx) - val dstHashes = 
insertElement(originalHashes, dataIx, originalHash) - - new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash) - } - - def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length) - - // copy 'src' and set 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, src.length) - dst(idx) = key - - new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) - } - - def copyAndRemoveValue(bitpos: Int, elementHash: Int) = { - val dataIx = dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = this.content - val dst = new Array[Any](src.length - 1) - - // copy 'src' and remove 1 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - arraycopy(src, idx + 1, dst, idx, src.length - idx - 1) - val dstHashes = removeElement(originalHashes, dataIx) - new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash) - } - - def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - val src = this.content - val dst = new Array[Any](src.length - 1 + 1) - - // copy 'src' and remove 1 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld <= idxNew) - arraycopy(src, 0, dst, 0, idxOld) - arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld) - dst(idxNew) = node - arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1) - val dstHashes = removeElement(originalHashes, dataIx) - new BitmapIndexedSetNode[A]( - dataMap = dataMap ^ bitpos, - nodeMap = nodeMap | bitpos, - content = dst, originalHashes = dstHashes, - size = 
size - 1 + node.size, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode - ) - } - /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. - * - * Note: This method will mutate `this`, and will mutate `this.content` - * - * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, - * content array size does not change during inline <-> node migrations. Therefor, since we are updating in-place, - * we reuse this.content by shifting data/nodes around, rather than allocating a new array. - * - * @param bitpos the bit position of the data to migrate to node - * @param keyHash the improved hash of the element currently at `bitpos` - * @param node the node to place at `bitpos` - */ - def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = { - val dataIx = dataIndex(bitpos) - val idxOld = TupleLength * dataIx - val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) - - arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld) - content(idxNew) = node - - this.dataMap = this.dataMap ^ bitpos - this.nodeMap = this.nodeMap | bitpos - this.originalHashes = removeElement(originalHashes, dataIx) - this.size = this.size - 1 + node.size - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode - this - } - - def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = { - val idxOld = this.content.length - 1 - nodeIndex(bitpos) - val dataIxNew = dataIndex(bitpos) - val idxNew = TupleLength * dataIxNew - - val src = this.content - val dst = new Array[Any](src.length - 1 + 1) - - // copy 'src' and remove 1 element(s) at position 'idxOld' and - // insert 1 element(s) at position 'idxNew' - // assert(idxOld >= idxNew) - arraycopy(src, 0, dst, 0, idxNew) - dst(idxNew) = 
node.getPayload(0) - arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew) - arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1) - val hash = node.getHash(0) - val dstHashes = insertElement(originalHashes, dataIxNew, hash) - new BitmapIndexedSetNode[A]( - dataMap = dataMap | bitpos, - nodeMap = nodeMap ^ bitpos, - content = dst, - originalHashes = dstHashes, - size = size - oldNode.size + 1, - cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode - ) - } - - /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node. - * - * Note: This method will mutate `this`, and will mutate `this.content` - * - * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, - * content array size does not change during inline <-> node migrations. Therefor, since we are updating in-place, - * we reuse this.content by shifting data/nodes around, rather than allocating a new array. 
- * - * @param bitpos the bit position of the node to migrate inline - * @param oldNode the node currently stored at position `bitpos` - * @param node the node containing the single element to migrate inline - */ - def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = { - val idxOld = this.content.length - 1 - nodeIndex(bitpos) - val dataIxNew = dataIndex(bitpos) - val element = node.getPayload(0) - arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew) - content(dataIxNew) = element - val hash = node.getHash(0) - val dstHashes = insertElement(originalHashes, dataIxNew, hash) - - this.dataMap = this.dataMap | bitpos - this.nodeMap = this.nodeMap ^ bitpos - this.originalHashes = dstHashes - this.size = this.size - oldNode.size + 1 - this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode - } - - def foreach[U](f: A => U): Unit = { - val thisPayloadArity = payloadArity - var i = 0 - while (i < thisPayloadArity) { - f(getPayload(i)) - i += 1 - } - - val thisNodeArity = nodeArity - var j = 0 - while (j < thisNodeArity) { - getNode(j).foreach(f) - j += 1 - } - } - - def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { - case _: HashCollisionSetNode[A] => false - case node: BitmapIndexedSetNode[A] => - val thisBitmap = this.dataMap | this.nodeMap - val nodeBitmap = node.dataMap | node.nodeMap - - if ((thisBitmap | nodeBitmap) != nodeBitmap) - return false - - var bitmap = thisBitmap & nodeBitmap - var bitsToSkip = numberOfTrailingZeros(bitmap) - - var isValidSubset = true - while (isValidSubset && bitsToSkip < HashCodeLength) { - val bitpos = bitposFrom(bitsToSkip) - - isValidSubset = - if ((this.dataMap & bitpos) != 0) { - if ((node.dataMap & bitpos) != 0) { - // Data x Data - val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos)) - val payload1 = 
node.getPayload(indexFrom(node.dataMap, bitpos)) - payload0 == payload1 - } else { - // Data x Node - val thisDataIndex = indexFrom(this.dataMap, bitpos) - val payload = this.getPayload(thisDataIndex) - val subNode = that.getNode(indexFrom(node.nodeMap, bitpos)) - val elementUnimprovedHash = getHash(thisDataIndex) - val elementHash = improve(elementUnimprovedHash) - subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize) - } - } else { - // Node x Node - val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos)) - val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos)) - subNode0.subsetOf(subNode1, shift + BitPartitionSize) - } - - val newBitmap = bitmap ^ bitpos - bitmap = newBitmap - bitsToSkip = numberOfTrailingZeros(newBitmap) - } - isValidSubset - } - - override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = { - if (size == 0) this - else if (size == 1) { - if (pred(getPayload(0)) != flipped) this else SetNode.empty - } else if (nodeMap == 0) { - // Performance optimization for nodes of depth 1: - // - // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler - // approach: - // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter - // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations - // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays - // - // note: - // * this optimization significantly improves performance of not only small trees, but also larger trees, since - // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as - // descendants - // - val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) - val maximumIndex: Int = Node.BranchingFactor - 
Integer.numberOfLeadingZeros(dataMap) - - var newDataMap = 0 - var newCachedHashCode = 0 - var dataIndex = 0 - - var i = minimumIndex - - while(i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } - - i += 1 - } - - if (newDataMap == 0) { - SetNode.empty - } else if (newDataMap == dataMap) { - this - } else { - val newSize = Integer.bitCount(newDataMap) - val newContent = new Array[Any](newSize) - val newOriginalHashCodes = new Array[Int](newSize) - val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) - - var j = Integer.numberOfTrailingZeros(newDataMap) - - var newDataIndex = 0 - - while (j < newMaximumIndex) { - val bitpos = bitposFrom(j) - if ((bitpos & newDataMap) != 0) { - val oldIndex = indexFrom(dataMap, bitpos) - newContent(newDataIndex) = content(oldIndex) - newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) - newDataIndex += 1 - } - j += 1 - } - - new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) - } - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - - // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, - // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in - // the parent anyways). 
This would probably involve changing the return type of filterImpl to `AnyRef` which may - // return at runtime a SetNode[A], or a tuple of (A, Int, Int) - - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null - - // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val passed = pred(payload) != flipped - - if (passed) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += improve(getHash(dataIndex)) - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val oldSubNode = getNode(nodeIndex) - val newSubNode = oldSubNode.filterImpl(pred, flipped) - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if 
(nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - - this.newNodeFrom( - newSize = newSize, - newDataMap = newDataMap, - newNodeMap = newNodeMap, - minimumIndex = minimumIndex, - oldDataPassThrough = oldDataPassThrough, - nodesToPassThroughMap = nodesToPassThroughMap, - nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, - nodesToMigrateToData = nodesToMigrateToData, - mapOfNewNodes = mapOfNewNodes, - newNodes = newNodes, - newCachedHashCode = newCachedHashCode - ) - } - } - - override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { - case bm: BitmapIndexedSetNode[A] => - if (size == 0) this - else if (size == 1) { - val h = getHash(0) - if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this - } else { - val allMap = dataMap | nodeMap - val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) - val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - - var oldDataPassThrough = 0 - - // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data - var nodeMigrateToDataTargetMap = 0 - // the queue of single-element, post-filter nodes - var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null - - // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node - var nodesToPassThroughMap = 0 - - // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself - // These are stored for later inclusion into the final `content` array - // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) - var mapOfNewNodes = 0 - // each bit in `mapOfNewNodes` corresponds to one element in this queue - var newNodes: mutable.Queue[SetNode[A]] = null - - var newDataMap = 0 - var newNodeMap = 0 - var newSize = 0 - var newCachedHashCode = 0 - - var dataIndex = 0 - var nodeIndex = 0 - - var i = minimumIndex - while (i < maximumIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & dataMap) != 0) { - val payload = getPayload(dataIndex) - val originalHash = getHash(dataIndex) - val hash = improve(originalHash) - - if (!bm.contains(payload, originalHash, hash, shift)) { - newDataMap |= bitpos - oldDataPassThrough |= bitpos - newSize += 1 - newCachedHashCode += hash - } - - dataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - val oldSubNode = getNode(nodeIndex) - - val newSubNode: SetNode[A] = - if ((bitpos & bm.dataMap) != 0) { - val thatDataIndex = indexFrom(bm.dataMap, bitpos) - val thatPayload = bm.getPayload(thatDataIndex) - val thatOriginalHash = bm.getHash(thatDataIndex) - val thatHash = improve(thatOriginalHash) - oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) - } else if ((bitpos & bm.nodeMap) != 0) { - oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) - } else { - oldSubNode - } - - newSize += newSubNode.size - newCachedHashCode += newSubNode.cachedJavaKeySetHashCode - - // if (newSubNode.size == 0) do nothing (drop it) - if (newSubNode.size > 1) { - newNodeMap |= bitpos - if (oldSubNode eq newSubNode) { - nodesToPassThroughMap |= bitpos - } else { - mapOfNewNodes |= bitpos - if (newNodes eq null) { - newNodes = 
mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - newNodes += newSubNode - } - } else if (newSubNode.size == 1) { - newDataMap |= bitpos - nodeMigrateToDataTargetMap |= bitpos - if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] - } - nodesToMigrateToData += newSubNode - } - - nodeIndex += 1 - } - - i += 1 - } - this.newNodeFrom( - newSize = newSize, - newDataMap = newDataMap, - newNodeMap = newNodeMap, - minimumIndex = minimumIndex, - oldDataPassThrough = oldDataPassThrough, - nodesToPassThroughMap = nodesToPassThroughMap, - nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, - nodesToMigrateToData = nodesToMigrateToData, - mapOfNewNodes = mapOfNewNodes, - newNodes = newNodes, - newCachedHashCode = newCachedHashCode - ) - } - case _: HashCollisionSetNode[A] => - // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the - // same depth - throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") - } - - /** Utility method only for use in `diff` and `filterImpl` - * - * @param newSize the size of the new SetNode - * @param newDataMap the dataMap of the new SetNode - * @param newNodeMap the nodeMap of the new SetNode - * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new - * SetNode - * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new - * SetNode - * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` to the new SetNode - * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, - * but which were nodes in `this` - * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated - * to data, in positions in the `nodeMigrateToDataTargetMap` - * @param mapOfNewNodes 
bitmap of positions of new nodes to include in the new SetNode - * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode - * @param newCachedHashCode the cached java keyset hashcode of the new SetNode - */ - private[this] def newNodeFrom( - newSize: Int, - newDataMap: Int, - newNodeMap: Int, - minimumIndex: Int, - oldDataPassThrough: Int, - nodesToPassThroughMap: Int, - nodeMigrateToDataTargetMap: Int, - nodesToMigrateToData: mutable.Queue[SetNode[A]], - mapOfNewNodes: Int, - newNodes: mutable.Queue[SetNode[A]], - newCachedHashCode: Int): BitmapIndexedSetNode[A] = { - if (newSize == 0) { - SetNode.empty - } else if (newSize == size) { - this - } else { - val newDataSize = bitCount(newDataMap) - val newContentSize = newDataSize + bitCount(newNodeMap) - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - - val newAllMap = newDataMap | newNodeMap - val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) - - // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will - // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) - var i = minimumIndex - - var oldDataIndex = 0 - var oldNodeIndex = 0 - - var newDataIndex = 0 - var newNodeIndex = 0 - - while (i < maxIndex) { - val bitpos = bitposFrom(i) - - if ((bitpos & oldDataPassThrough) != 0) { - newContent(newDataIndex) = getPayload(oldDataIndex) - newOriginalHashes(newDataIndex) = getHash(oldDataIndex) - newDataIndex += 1 - oldDataIndex += 1 - } else if ((bitpos & nodesToPassThroughMap) != 0) { - newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { - // we need not check for null here. 
If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null - val node = nodesToMigrateToData.dequeue() - newContent(newDataIndex) = node.getPayload(0) - newOriginalHashes(newDataIndex) = node.getHash(0) - newDataIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & mapOfNewNodes) != 0) { - // we need not check for null here. If mapOfNewNodes != 0, then newNodes must not be null - newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() - newNodeIndex += 1 - oldNodeIndex += 1 - } else if ((bitpos & dataMap) != 0) { - oldDataIndex += 1 - } else if ((bitpos & nodeMap) != 0) { - oldNodeIndex += 1 - } - - i += 1 - } - - new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) - } - } - - - override def equals(that: Any): Boolean = - that match { - case node: BitmapIndexedSetNode[_] => - (this eq node) || - (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && - (this.nodeMap == node.nodeMap) && - (this.dataMap == node.dataMap) && - (this.size == node.size) && - java.util.Arrays.equals(this.originalHashes, node.originalHashes) && - deepContentEquality(this.content, node.content, content.length) - case _ => false - } - - @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { - if (a1 eq a2) - true - else { - var isEqual = true - var i = 0 - - while (isEqual && i < length) { - isEqual = a1(i) == a2(i) - i += 1 - } - - isEqual - } - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def copy(): BitmapIndexedSetNode[A] = { - val contentClone = content.clone() - val contentLength = contentClone.length - var i = bitCount(dataMap) - while (i < contentLength) { - contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() - i += 1 - } - new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) - } - 
- override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { - case bm: BitmapIndexedSetNode[A] => - if (size == 0) return bm - else if (bm.size == 0 || (bm eq this)) return this - else if (bm.size == 1) { - val originalHash = bm.getHash(0) - return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) - } - - // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing - // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the - // currently-being-computed result, and `this` - var anyChangesMadeSoFar = false - - // bitmap containing `1` in any position that has any descendant in either left or right, either data or node - val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap - - // minimumIndex is inclusive -- it is the first index for which there is data or nodes - val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) - // maximumIndex is inclusive -- it is the last index for which there is data or nodes - // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound - // of int bitposition representation - val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) - - var leftNodeRightNode = 0 - var leftDataRightNode = 0 - var leftNodeRightData = 0 - var leftDataOnly = 0 - var rightDataOnly = 0 - var leftNodeOnly = 0 - var rightNodeOnly = 0 - var leftDataRightDataMigrateToNode = 0 - var leftDataRightDataLeftOverwrites = 0 - - var dataToNodeMigrationTargets = 0 - - { - var bitpos = minimumBitPos - var leftIdx = 0 - var rightIdx = 0 - var finished = false - - while (!finished) { - - if ((bitpos & dataMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - if (getHash(leftIdx) == bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { - leftDataRightDataLeftOverwrites |= 
bitpos - } else { - leftDataRightDataMigrateToNode |= bitpos - dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) - } - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftDataRightNode |= bitpos - } else { - leftDataOnly |= bitpos - } - leftIdx += 1 - } else if ((bitpos & nodeMap) != 0) { - if ((bitpos & bm.dataMap) != 0) { - leftNodeRightData |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - leftNodeRightNode |= bitpos - } else { - leftNodeOnly |= bitpos - } - } else if ((bitpos & bm.dataMap) != 0) { - rightDataOnly |= bitpos - rightIdx += 1 - } else if ((bitpos & bm.nodeMap) != 0) { - rightNodeOnly |= bitpos - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - - val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites - - val newNodeMap = - leftNodeRightNode | - leftDataRightNode | - leftNodeRightData | - leftNodeOnly | - rightNodeOnly | - dataToNodeMigrationTargets - - - if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { - // nothing from `bm` will make it into the result -- return early - return this - } - - val newDataSize = bitCount(newDataMap) - val newContentSize = newDataSize + bitCount(newNodeMap) - - val newContent = new Array[Any](newContentSize) - val newOriginalHashes = new Array[Int](newDataSize) - var newSize = 0 - var newCachedHashCode = 0 - - { - var leftDataIdx = 0 - var rightDataIdx = 0 - var leftNodeIdx = 0 - var rightNodeIdx = 0 - - val nextShift = shift + Node.BitPartitionSize - - var compressedDataIdx = 0 - var compressedNodeIdx = 0 - - var bitpos = minimumBitPos - var finished = false - - while (!finished) { - - if ((bitpos & leftNodeRightNode) != 0) { - val leftNode = getNode(leftNodeIdx) - val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) - if (leftNode ne newNode) { - anyChangesMadeSoFar = true - } - 
newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataRightNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val n = bm.getNode(rightNodeIdx) - val leftPayload = getPayload(leftDataIdx) - val leftOriginalHash = getHash(leftDataIdx) - val leftImproved = improve(leftOriginalHash) - n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - leftDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } - else if ((bitpos & leftNodeRightData) != 0) { - val newNode = { - val rightOriginalHash = bm.getHash(rightDataIdx) - val leftNode = getNode(leftNodeIdx) - val updated = leftNode.updated( - element = bm.getPayload(rightDataIdx), - originalHash = bm.getHash(rightDataIdx), - hash = improve(rightOriginalHash), - shift = nextShift - ) - if (updated ne leftNode) { - anyChangesMadeSoFar = true - } - updated - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - - } else if ((bitpos & leftDataOnly) != 0) { - val originalHash = originalHashes(leftDataIdx) - newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - leftDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & rightDataOnly) != 0) { - anyChangesMadeSoFar = true - val originalHash = bm.originalHashes(rightDataIdx) - newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - 
compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - } else if ((bitpos & leftNodeOnly) != 0) { - val newNode = getNode(leftNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & rightNodeOnly) != 0) { - anyChangesMadeSoFar = true - val newNode = bm.getNode(rightNodeIdx) - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - rightNodeIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { - anyChangesMadeSoFar = true - val newNode = { - val leftOriginalHash = getHash(leftDataIdx) - val rightOriginalHash = bm.getHash(rightDataIdx) - - bm.mergeTwoKeyValPairs( - getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), - bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), - nextShift - ) - } - - newContent(newContentSize - compressedNodeIdx - 1) = newNode - compressedNodeIdx += 1 - leftDataIdx += 1 - rightDataIdx += 1 - newSize += newNode.size - newCachedHashCode += newNode.cachedJavaKeySetHashCode - } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { - val originalHash = bm.originalHashes(rightDataIdx) - newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] - newOriginalHashes(compressedDataIdx) = originalHash - - compressedDataIdx += 1 - rightDataIdx += 1 - newSize += 1 - newCachedHashCode += improve(originalHash) - leftDataIdx += 1 - } - - if (bitpos == maximumBitPos) { - finished = true - } else { - bitpos = bitpos << 1 - } - } - } - - if (anyChangesMadeSoFar) - new BitmapIndexedSetNode( - dataMap = newDataMap, - nodeMap = newNodeMap, - content = newContent, - originalHashes = newOriginalHashes, - size = newSize, - cachedJavaKeySetHashCode = 
newCachedHashCode - ) - else this - - case _ => - // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") - } - - override def foreachWithHash(f: (A, Int) => Unit): Unit = { - val iN = payloadArity // arity doesn't change during this operation - var i = 0 - while (i < iN) { - f(getPayload(i), getHash(i)) - i += 1 - } - - val jN = nodeArity // arity doesn't change during this operation - var j = 0 - while (j < jN) { - getNode(j).foreachWithHash(f) - j += 1 - } - } - - override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { - val thisPayloadArity = payloadArity - var pass = true - var i = 0 - while (i < thisPayloadArity && pass) { - pass &&= f(getPayload(i), getHash(i)) - i += 1 - } - - val thisNodeArity = nodeArity - var j = 0 - while (j < thisNodeArity && pass) { - pass &&= getNode(j).foreachWithHashWhile(f) - j += 1 - } - pass - } -} - -private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] { - - import Node._ - - require(content.length >= 2) - - def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean = - this.hash == hash && content.contains(element) - - def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = - if (this.contains(element, originalHash, hash, shift)) { - this - } else { - new HashCollisionSetNode[A](originalHash, hash, content.appended(element)) - } - - /** - * Remove an element from the hash collision node. - * - * When after deletion only one element remains, we return a bit-mapped indexed node with a - * singleton element and a hash-prefix for trie level 0. This node will be then a) either become - * the new root, or b) unwrapped and inlined deeper in the trie. 
- */ - def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = - if (!this.contains(element, originalHash, hash, shift)) { - this - } else { - val updatedContent = content.filterNot(element0 => element0 == element) - // assert(updatedContent.size == content.size - 1) - - updatedContent.size match { - case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash) - case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent) - } - } - - def hasNodes: Boolean = false - - def nodeArity: Int = 0 - - def getNode(index: Int): SetNode[A] = - throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") - - def hasPayload: Boolean = true - - def payloadArity: Int = content.length - - def getPayload(index: Int): A = content(index) - - override def getHash(index: Int): Int = originalHash - - def size: Int = content.length - - def foreach[U](f: A => U): Unit = { - val iter = content.iterator - while (iter.hasNext) { - f(iter.next()) - } - } - - - override def cachedJavaKeySetHashCode: Int = size * hash - - def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { - case node: HashCollisionSetNode[A] => - this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains) - case _ => - false - } - - override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = { - val newContent = content.filterImpl(pred, flipped) - val newContentLength = newContent.length - if (newContentLength == 0) { - SetNode.empty - } else if (newContentLength == 1) { - new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash) - } else if (newContent.length == content.length) this - else new HashCollisionSetNode(originalHash, hash, newContent) - } - - override def diff(that: SetNode[A], shift: Int): SetNode[A] = - filterImpl(that.contains(_, originalHash, 
hash, shift), true) - - override def equals(that: Any): Boolean = - that match { - case node: HashCollisionSetNode[_] => - (this eq node) || - (this.hash == node.hash) && - (this.content.size == node.content.size) && - this.content.forall(node.content.contains) - case _ => false - } - - override def hashCode(): Int = - throw new UnsupportedOperationException("Trie nodes do not support hashing.") - - override def copy() = new HashCollisionSetNode[A](originalHash, hash, content) - - override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match { - case hc: HashCollisionSetNode[A] => - if (hc eq this) { - this - } else { - var newContent: VectorBuilder[A] = null - val iter = hc.content.iterator - while (iter.hasNext) { - val nextPayload = iter.next() - if (!content.contains(nextPayload)) { - if (newContent eq null) { - newContent = new VectorBuilder() - newContent.addAll(this.content) - } - newContent.addOne(nextPayload) - } - } - if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) - } - case _: BitmapIndexedSetNode[A] => - // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes - throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") - } - - override def foreachWithHash(f: (A, Int) => Unit): Unit = { - val iter = content.iterator - while (iter.hasNext) { - val next = iter.next() - f(next.asInstanceOf[A], originalHash) - } - } - - override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { - var stillGoing = true - val iter = content.iterator - while (iter.hasNext && stillGoing) { - val next = iter.next() - stillGoing &&= f(next.asInstanceOf[A], originalHash) - } - stillGoing - } -} - -private final class SetIterator[A](rootNode: SetNode[A]) - extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { - - def next() = { - if (!hasNext) - throw new NoSuchElementException - - val payload = 
currentValueNode.getPayload(currentValueCursor) - currentValueCursor += 1 - - payload - } - -} - -private final class SetReverseIterator[A](rootNode: SetNode[A]) - extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { - - def next(): A = { - if (!hasNext) - throw new NoSuchElementException - - val payload = currentValueNode.getPayload(currentValueCursor) - currentValueCursor -= 1 - - payload - } - -} - -private final class SetHashIterator[A](rootNode: SetNode[A]) - extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { - private[this] var hash = 0 - override def hashCode(): Int = hash - - def next(): AnyRef = { - if (!hasNext) - throw new NoSuchElementException - - hash = currentValueNode.getHash(currentValueCursor) - currentValueCursor += 1 - this - } - -} - - -/** - * $factoryInfo - * - * @define Coll `immutable.HashSet` - * @define coll immutable champ hash set - */ -@SerialVersionUID(3L) -object HashSet extends IterableFactory[HashSet] { - - @transient - private final val EmptySet = new HashSet(SetNode.empty) - - def empty[A]: HashSet[A] = - EmptySet.asInstanceOf[HashSet[A]] - - def from[A](source: collection.IterableOnce[A]^): HashSet[A] = - source match { - case hs: HashSet[A] => hs - case _ if source.knownSize == 0 => empty[A] - case _ => (newBuilder[A] ++= source).result() - } - - /** Create a new Builder which can be reused after calling `result()` without an - * intermediate call to `clear()` in order to build multiple related results. - */ - def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder -} - -/** Builder for HashSet. 
- * $multipleResults - */ -private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { - import Node._ - import SetNode._ - - private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) - - /** The last given out HashSet as a return value of `result()`, if any, otherwise null. - * Indicates that on next add, the elements should be copied to an identical structure, before continuing - * mutations. */ - private var aliased: HashSet[A] @uncheckedCaptures = _ - - private def isAliased: Boolean = aliased != null - - /** The root node of the partially build hashmap */ - private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode - - /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ - private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { - if (ix < 0) throw new ArrayIndexOutOfBoundsException - if (ix > as.length) throw new ArrayIndexOutOfBoundsException - val result = new Array[Int](as.length + 1) - arraycopy(as, 0, result, 0, ix) - result(ix) = elem - arraycopy(as, ix, result, ix + 1, as.length - ix) - result - } - - /** Inserts key-value into the bitmapIndexMapNode. 
Requires that this is a new key-value pair */ - private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = { - val dataIx = bm.dataIndex(bitpos) - val idx = TupleLength * dataIx - - val src = bm.content - val dst = new Array[Any](src.length + TupleLength) - - // copy 'src' and insert 2 element(s) at position 'idx' - arraycopy(src, 0, dst, 0, idx) - dst(idx) = key - arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) - - val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) - - bm.dataMap = bm.dataMap | bitpos - bm.content = dst - bm.originalHashes = dstHashes - bm.size += 1 - bm.cachedJavaKeySetHashCode += keyHash - } - - /** Mutates `bm` to replace inline data at bit position `bitpos` with updated key/value */ - private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = { - val dataIx = bm.dataIndex(bitpos) - val idx = TupleLength * dataIx - bm.content(idx) = elem - } - - def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit = - setNode match { - case bm: BitmapIndexedSetNode[A] => - val mask = maskFrom(elementHash, shift) - val bitpos = bitposFrom(mask) - - if ((bm.dataMap & bitpos) != 0) { - val index = indexFrom(bm.dataMap, mask, bitpos) - val element0 = bm.getPayload(index) - val element0UnimprovedHash = bm.getHash(index) - - if (element0UnimprovedHash == originalHash && element0 == element) { - setValue(bm, bitpos, element0) - } else { - val element0Hash = improve(element0UnimprovedHash) - val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) - bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) - } - } else if ((bm.nodeMap & bitpos) != 0) { - val index = indexFrom(bm.nodeMap, mask, bitpos) - val subNode = bm.getNode(index) - val beforeSize = subNode.size - val beforeHashCode = 
subNode.cachedJavaKeySetHashCode - update(subNode, element, originalHash, elementHash, shift + BitPartitionSize) - bm.size += subNode.size - beforeSize - bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode - } else { - insertValue(bm, bitpos, element, originalHash, elementHash) - } - case hc: HashCollisionSetNode[A] => - val index = hc.content.indexOf(element) - if (index < 0) { - hc.content = hc.content.appended(element) - } else { - hc.content = hc.content.updated(index, element) - } - } - - /** If currently referencing aliased structure, copy elements to new mutable structure */ - private def ensureUnaliased():Unit = { - if (isAliased) copyElems() - aliased = null - } - - /** Copy elements to new mutable structure */ - private def copyElems(): Unit = { - rootNode = rootNode.copy() - } - - override def result(): HashSet[A] = - if (rootNode.size == 0) { - HashSet.empty - } else if (aliased != null) { - aliased - } else { - aliased = new HashSet(rootNode) - releaseFence() - aliased - } - - override def addOne(elem: A): this.type = { - ensureUnaliased() - val h = elem.## - val im = improve(h) - update(rootNode, elem, h, im, 0) - this - } - - override def addAll(xs: IterableOnce[A]^) = { - ensureUnaliased() - xs match { - case hm: HashSet[A] => - new ChampBaseIterator[SetNode[A]](hm.rootNode) { - while(hasNext) { - val originalHash = currentValueNode.getHash(currentValueCursor) - update( - setNode = rootNode, - element = currentValueNode.getPayload(currentValueCursor), - originalHash = originalHash, - elementHash = improve(originalHash), - shift = 0 - ) - currentValueCursor += 1 - } - }.asInstanceOf // !!! 
cc gets confused with representation of capture sets in invariant position - case other => - val it = other.iterator - while(it.hasNext) addOne(it.next()) - } - - this - } - - override def clear(): Unit = { - aliased = null - if (rootNode.size > 0) { - // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty - rootNode = newEmptyRootNode - } - } - - private[collection] def size: Int = rootNode.size - - override def knownSize: Int = rootNode.size -} diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala deleted file mode 100644 index d7077845b845..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/IntMap.scala +++ /dev/null @@ -1,504 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.collection.generic.{BitOperations, DefaultSerializationProxy} -import scala.collection.mutable.{Builder, ImmutableBuilder} -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Utility class for integer maps. 
- */ -private[immutable] object IntMapUtils extends BitOperations.Int { - def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) - - def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) - else IntMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { - case (left, IntMap.Nil) => left - case (IntMap.Nil, right) => right - case (left, right) => IntMap.Bin(prefix, mask, left, right) - } -} - -import IntMapUtils._ - -/** A companion object for integer maps. - * - * @define Coll `IntMap` - */ -object IntMap { - def empty[T] : IntMap[T] = IntMap.Nil - - def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) - - def apply[T](elems: (Int, T)*): IntMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = - newBuilder[V].addAll(coll).result() - - private[immutable] case object Nil extends IntMap[Nothing] { - // Important! Without this equals method in place, an infinite - // loop from Map.equals => size => pattern-match-on-Nil => equals - // develops. Case objects and custom equality don't mix without - // careful handling. 
- override def equals(that : Any) = that match { - case _: this.type => true - case _: IntMap[_] => false // The only empty IntMaps are eq Nil - case _ => super.equals(that) - } - } - - private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] - else IntMap.Tip(key, s) - } - - private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { - def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] - else IntMap.Bin[S](prefix, mask, left, right) - } - } - - def newBuilder[V]: Builder[(Int, V), IntMap[V]] = - new ImmutableBuilder[(Int, V), IntMap[V]](empty) { - def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this } - } - - implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it) - def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] - } - - implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] - private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it) - def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] - } - - implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this) - implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this) -} - -// Iterator over a non-empty IntMap. 
-private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { - - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and - // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 33 and - var index = 0 - var buffer = new Array[AnyRef](33) - - def pop = { - index -= 1 - buffer(index).asInstanceOf[IntMap[V]] - } - - def push(x: IntMap[V]): Unit = { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - push(it) - - /** - * What value do we assign to a tip? - */ - def valueOf(tip: IntMap.Tip[V]): T - - def hasNext = index != 0 - @tailrec - final def next(): T = - pop match { - case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => { - push(right) - valueOf(t) - } - case IntMap.Bin(_, _, left, right) => { - push(right) - push(left) - next() - } - case t@IntMap.Tip(_, _) => valueOf(t) - // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap - // and don't return an IntMapIterator for IntMap.Nil. - case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") - } -} - -private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { - def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) -} - -private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.value -} - -private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { - def valueOf(tip: IntMap.Tip[V]) = tip.key -} - -import IntMap._ - -/** Specialised immutable map structure for integer keys, based on - * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. 
- * - * '''Note:''' This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with integer keys. - * - * @define Coll `immutable.IntMap` - * @define coll immutable integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class IntMap[+T] extends AbstractMap[Int, T] - with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] - with Serializable { - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] = - intMapFrom[T](coll) - protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = { - val b = IntMap.newBuilder[V2] - b.sizeHint(coll) - b.addAll(coll) - b.result() - } - override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = - new ImmutableBuilder[(Int, T), IntMap[T]](empty) { - def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } - } - - override def empty: IntMap[T] = IntMap.Nil - - override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures] - foreach(buffer += _) - buffer.toList - } - - /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of integer keys and corresponding values. - */ - def iterator: Iterator[(Int, T)] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapEntryIterator(this) - } - - /** - * Loops over the key, value pairs of the map in unsigned order of the keys. 
- */ - override final def foreach[U](f: ((Int, T)) => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case IntMap.Tip(key, value) => f((key, value)) - case IntMap.Nil => - } - - override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } - case IntMap.Tip(key, value) => f(key, value) - case IntMap.Nil => - } - - override def keysIterator: Iterator[Int] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapKeyIterator(this) - } - - /** - * Loop over the keys of the map. The same as `keys.foreach(f)`, but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey[U](f: Int => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } - case IntMap.Tip(key, _) => f(key) - case IntMap.Nil => - } - - override def valuesIterator: Iterator[T] = this match { - case IntMap.Nil => Iterator.empty - case _ => new IntMapValueIterator(this) - } - - /** - * Loop over the values of the map. The same as `values.foreach(f)`, but may - * be more efficient. 
- * - * @param f The loop body - */ - final def foreachValue[U](f: T => U): Unit = this match { - case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } - case IntMap.Tip(_, value) => f(value) - case IntMap.Nil => - } - - override protected[this] def className = "IntMap" - - override def isEmpty = this eq IntMap.Nil - override def knownSize: Int = if (isEmpty) 0 else super.knownSize - override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)) - if ((left eq newleft) && (right eq newright)) this - else bin(prefix, mask, newleft, newright) - } - case IntMap.Tip(key, value) => - if (f((key, value))) this - else IntMap.Nil - case IntMap.Nil => IntMap.Nil - } - - override def transform[S](f: (Int, T) => S): IntMap[S] = this match { - case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) - case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) - case IntMap.Nil => IntMap.Nil - } - - final override def size: Int = this match { - case IntMap.Nil => 0 - case IntMap.Tip(_, _) => 1 - case IntMap.Bin(_, _, left, right) => left.size + right.size - } - - @tailrec - final def get(key: Int): Option[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) - case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None - case IntMap.Nil => None - } - - @tailrec - final override def getOrElse[S >: T](key: Int, default: => S): S = this match { - case IntMap.Nil => default - case IntMap.Tip(key2, value) => if (key == key2) value else default - case IntMap.Bin(prefix, mask, left, right) => - if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) - } - - @tailrec - final override def apply(key: Int): T = this match { - case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) 
left(key) else right(key) - case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") - case IntMap.Nil => throw new IllegalArgumentException("key not found") - } - - override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) - - override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) - else IntMap.Bin(prefix, mask, left, right.updated(key, value)) - case IntMap.Tip(key2, value2) => - if (key == key2) IntMap.Tip(key, value) - else join(key, IntMap.Tip(key, value), key2, this) - case IntMap.Nil => IntMap.Tip(key, value) - } - - def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) - - def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) - - override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = - super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such - - override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that) - - def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = - strictOptimizedCollect(IntMap.newBuilder[V2], pf) - - /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to: - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update - * @param value The value to use if there is no conflict - * @param f The function used to resolve conflicts. - * @return The updated map. 
- */ - def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) - case IntMap.Tip(key2, value2) => - if (key == key2) IntMap.Tip(key, f(value2, value)) - else join(key, IntMap.Tip(key, value), key2, this) - case IntMap.Nil => IntMap.Tip(key, value) - } - - def removed (key: Int): IntMap[T] = this match { - case IntMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this - else if (zero(key, mask)) bin(prefix, mask, left - key, right) - else bin(prefix, mask, left, right - key) - case IntMap.Tip(key2, _) => - if (key == key2) IntMap.Nil - else this - case IntMap.Nil => IntMap.Nil - } - - /** - * A combined transform and filter function. Returns an `IntMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. 
- */ - def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { - case IntMap.Bin(prefix, mask, left, right) => - val newleft = left.modifyOrRemove(f) - val newright = right.modifyOrRemove(f) - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]] - else bin(prefix, mask, newleft, newright) - case IntMap.Tip(key, value) => f(key, value) match { - case None => - IntMap.Nil - case Some(value2) => - //hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]] - else IntMap.Tip(key, value2) - } - case IntMap.Nil => - IntMap.Nil - } - - /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
- */ - def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{ - case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) - else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) - else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) - } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) - else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) - else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) - } - else { - if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join(p1, this, p2, that) - } - case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) - case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) - case (IntMap.Nil, x) => x - case (x, IntMap.Nil) => x - } - - /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. 
- */ - def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { - case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) IntMap.Nil - else if (zero(p2, m1)) l1.intersectionWith(that, f) - else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { - if (!hasMatch(p1, p2, m2)) IntMap.Nil - else if (zero(p1, m2)) this.intersectionWith(l2, f) - else this.intersectionWith(r2, f) - } - case (IntMap.Tip(key, value), that) => that.get(key) match { - case None => IntMap.Nil - case Some(value2) => IntMap.Tip(key, f(key, value, value2)) - } - case (_, IntMap.Tip(key, value)) => this.get(key) match { - case None => IntMap.Nil - case Some(value2) => IntMap.Tip(key, f(key, value2, value)) - } - case (_, _) => IntMap.Nil - } - - /** - * Left biased intersection. Returns the map that has all the same mappings - * as this but only for keys which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. - */ - def intersection[R](that: IntMap[R]): IntMap[T] = - this.intersectionWith(that, (key: Int, value: T, value2: R) => value) - - def ++[S >: T](that: IntMap[S]) = - this.unionWith[S](that, (key, x, y) => y) - - /** - * The entry with the lowest key value considered in unsigned order. - */ - @tailrec - final def firstKey: Int = this match { - case Bin(_, _, l, r) => l.firstKey - case Tip(k, v) => k - case IntMap.Nil => throw new IllegalStateException("Empty set") - } - - /** - * The entry with the highest key value considered in unsigned order. 
- */ - @tailrec - final def lastKey: Int = this match { - case Bin(_, _, l, r) => r.lastKey - case Tip(k, v) => k - case IntMap.Nil => throw new IllegalStateException("Empty set") - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this) -} diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index c4f9900eea8b..44f13d0f2895 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -32,7 +32,7 @@ trait Iterable[+A] extends collection.Iterable[A] @SerialVersionUID(3L) object Iterable extends IterableFactory.Delegate[Iterable](List) { - override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match { + override def from[E](it: IterableOnce[E]): Iterable[E] = it match { case iterable: Iterable[E] => iterable case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala deleted file mode 100644 index 5684130b6048..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala +++ /dev/null @@ -1,1376 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import java.io.{ObjectInputStream, ObjectOutputStream} -import java.lang.{StringBuilder => JStringBuilder} - -import scala.annotation.tailrec -import scala.collection.generic.SerializeEnd -import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} -import scala.language.implicitConversions -import scala.runtime.Statics -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** This class implements an immutable linked list. We call it "lazy" - * because it computes its elements only when they are needed. - * - * The class extends Iterable; it is a replacement for LazyList, which - * which implemented Seq. The reason is that under capture checking, we - * assume that all Seqs are strict, and LazyList broke that assumption. - * As a consequence, we declare LazyList is deprecated and unsafe for - * capture checking, and replace it by the current class, LazyListIterable. - * - * Elements are memoized; that is, the value of each element is computed at most once. - * - * Elements are computed in-order and are never skipped. In other words, - * accessing the tail causes the head to be computed first. - * - * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you - * don't know yet whether the list is empty or not. If you learn that it is non-empty, - * then you also know that the head has been computed. But the tail is itself - * a `LazyListIterable`, whose emptiness-or-not might remain undetermined. - * - * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains - * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, - * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. 
- * - * Here is an example: - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * val fibs: LazyListIterable[BigInt] = - * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } - * fibs.take(5).foreach(println) - * } - * - * // prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * }}} - * - * To illustrate, let's add some output to the definition `fibs`, so we - * see what's going on. - * - * {{{ - * import scala.math.BigInt - * object Main extends App { - * val fibs: LazyListIterable[BigInt] = - * BigInt(0) #:: BigInt(1) #:: - * fibs.zip(fibs.tail).map{ n => - * println(s"Adding \${n._1} and \${n._2}") - * n._1 + n._2 - * } - * fibs.take(5).foreach(println) - * fibs.take(6).foreach(println) - * } - * - * // prints - * // - * // 0 - * // 1 - * // Adding 0 and 1 - * // 1 - * // Adding 1 and 1 - * // 2 - * // Adding 1 and 2 - * // 3 - * - * // And then prints - * // - * // 0 - * // 1 - * // 1 - * // 2 - * // 3 - * // Adding 2 and 3 - * // 5 - * }}} - * - * Note that the definition of `fibs` uses `val` not `def`. The memoization of the - * `LazyListIterable` requires us to have somewhere to store the information and a `val` - * allows us to do that. - * - * Further remarks about the semantics of `LazyListIterable`: - * - * - Though the `LazyListIterable` changes as it is accessed, this does not - * contradict its immutability. Once the values are memoized they do - * not change. Values that have yet to be memoized still "exist", they - * simply haven't been computed yet. - * - * - One must be cautious of memoization; it can eat up memory if you're not - * careful. That's because memoization of the `LazyListIterable` creates a structure much like - * [[scala.collection.immutable.List]]. As long as something is holding on to - * the head, the head holds on to the tail, and so on recursively. - * If, on the other hand, there is nothing holding on to the head (e.g. 
if we used - * `def` to define the `LazyListIterable`) then once it is no longer being used directly, - * it disappears. - * - * - Note that some operations, including [[drop]], [[dropWhile]], - * [[flatMap]] or [[collect]] may process a large number of intermediate - * elements before returning. - * - * Here's another example. Let's start with the natural numbers and iterate - * over them. - * - * {{{ - * // We'll start with a silly iteration - * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { - * // Stop after 200,000 - * if (i < 200001) { - * if (i % 50000 == 0) println(s + i) - * loop(s, iter.next(), iter) - * } - * } - * - * // Our first LazyListIterable definition will be a val definition - * val lazylist1: LazyListIterable[Int] = { - * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) - * loop(0) - * } - * - * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1 - * val it1 = lazylist1.iterator - * loop("Iterator1: ", it1.next(), it1) - * - * // We can redefine this LazyListIterable such that all we have is the Iterator left - * // and allow the LazyListIterable to be garbage collected as required. Using a def - * // to provide the LazyListIterable ensures that no val is holding onto the head as - * // is the case with lazylist1 - * def lazylist2: LazyListIterable[Int] = { - * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) - * loop(0) - * } - * val it2 = lazylist2.iterator - * loop("Iterator2: ", it2.next(), it2) - * - * // And, of course, we don't actually need a LazyListIterable at all for such a simple - * // problem. There's no reason to use a LazyListIterable if you don't actually need - * // one. 
- * val it3 = new Iterator[Int] { - * var i = -1 - * def hasNext = true - * def next(): Int = { i += 1; i } - * } - * loop("Iterator3: ", it3.next(), it3) - * }}} - * - * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. - * `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic. - * If we defined `fibs` such that only `0` were concretely known, then the act - * of determining `tail` would require the evaluation of `tail`, so the - * computation would be unable to progress, as in this code: - * {{{ - * // The first time we try to access the tail we're going to need more - * // information which will require us to recurse, which will require us to - * // recurse, which... - * lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } - * }}} - * - * The definition of `fibs` above creates a larger number of objects than - * necessary depending on how you might want to implement it. The following - * implementation provides a more "cost effective" implementation due to the - * fact that it has a more direct route to the numbers themselves: - * - * {{{ - * lazy val fib: LazyListIterable[Int] = { - * def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n) - * loop(1, 1) - * } - * }}} - * - * The head, the tail and whether the list is empty or not can be initially unknown. - * Once any of those are evaluated, they are all known, though if the tail is - * built with `#::` or `#:::`, it's content still isn't evaluated. Instead, evaluating - * the tails content is deferred until the tails empty status, head or tail is - * evaluated. - * - * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed - * allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`. - * - * Only when it's further evaluated (which may be never!) any of the elements gets - * forced. 
- * - * for example: - * - * {{{ - * def tailWithSideEffect: LazyListIterable[Nothing] = { - * println("getting empty LazyListIterable") - * LazyListIterable.empty - * } - * - * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable" - * - * val suspended = 1 #:: tailWithSideEffect // doesn't print anything - * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed - * val filtered = tail.filter(_ => false) // still nothing is printed - * filtered.isEmpty // prints "getting empty LazyListIterable" - * }}} - * - * @tparam A the type of the elements contained in this lazy list. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] - * section on `LazyLists` for more information. - * @define Coll `LazyListIterable` - * @define coll lazy list - * @define orderDependent - * @define orderDependentFold - * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, - * `appendedAll`, `lazyAppendedAll`) without forcing any of the - * intermediate resulting lazy lists may overflow the stack when - * the final result is forced. - * @define preservesLaziness This method preserves laziness; elements are only evaluated - * individually as needed. - * @define initiallyLazy This method does not evaluate anything until an operation is performed - * on the result (e.g. calling `head` or `tail`, or checking if it is empty). - * @define evaluatesAllElements This method evaluates all elements of the collection. 
- */ -@SerialVersionUID(3L) -final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) - extends AbstractIterable[A] - with Iterable[A] - with IterableOps[A, LazyListIterable, LazyListIterable[A]] - with IterableFactoryDefaults[A, LazyListIterable] - with Serializable { - this: LazyListIterable[A]^ => - import LazyListIterable._ - - @volatile private[this] var stateEvaluated: Boolean = false - @inline private def stateDefined: Boolean = stateEvaluated - private[this] var midEvaluation = false - - private lazy val state: State[A]^ = { - // if it's already mid-evaluation, we're stuck in an infinite - // self-referential loop (also it's empty) - if (midEvaluation) { - throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") - } - midEvaluation = true - val res = try lazyState() finally midEvaluation = false - // if we set it to `true` before evaluating, we may infinite loop - // if something expects `state` to already be evaluated - stateEvaluated = true - lazyState = null // allow GC - res - } - - override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable - - override def isEmpty: Boolean = state eq State.Empty - - /** @inheritdoc - * - * $preservesLaziness - */ - override def knownSize: Int = if (knownIsEmpty) 0 else -1 - - override def head: A = state.head - - override def tail: LazyListIterable[A]^{this} = state.tail - - @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) - @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) - - /** Evaluates all undefined elements of the lazy list. - * - * This method detects cycles in lazy lists, and terminates after all - * elements of the cycle are evaluated. For example: - * - * {{{ - * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring - * ring.force - * ring.toString - * - * // prints - * // - * // LazyListIterable(1, 2, 3, ...) 
- * }}} - * - * This method will *not* terminate for non-cyclic infinite-sized collections. - * - * @return this - */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyListIterable[A]^{this} = this - if (!these.isEmpty) { - these = these.tail - } - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - /** @inheritdoc - * - * The iterator returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def iterator: Iterator[A]^{this} = - if (knownIsEmpty) Iterator.empty - else new LazyIterator(this) - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). - * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyListIterable as elements - * are consumed. - * @note This function will force the realization of the entire LazyListIterable - * unless the `f` throws an exception. - */ - @tailrec - override def foreach[U](f: A => U): Unit = { - if (!isEmpty) { - f(head) - tail.foreach(f) - } - } - - /** LazyListIterable specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyListIterable`. - * @return The accumulated value from successive applications of `op`. 
- */ - @tailrec - override def foldLeft[B](z: B)(op: (B, A) => B): B = - if (isEmpty) z - else tail.foldLeft(op(z, head))(op) - - // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef^{this} = - if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this - - override protected[this] def className = "LazyListIterable" - - /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. - * - * $preservesLaziness - * - * $appendStackSafety - * - * @param suffix The collection that gets appended to this lazy list - * @return The lazy list containing elements of this lazy list and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = - newLL { - if (isEmpty) suffix match { - case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable - case coll if coll.knownSize == 0 => State.Empty - case coll => stateFromIterator(coll.iterator) - } - else sCons(head, tail lazyAppendedAll suffix) - } - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = - if (knownIsEmpty) LazyListIterable.from(suffix) - else lazyAppendedAll(suffix) - - /** @inheritdoc - * - * $preservesLaziness - * - * $appendStackSafety - */ - def appended[B >: A](elem: B): LazyListIterable[B]^{this} = - if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) - else lazyAppendedAll(Iterator.single(elem)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = - if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) - else newLL(scanLeftState(z)(op)) - - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = - sCons( - z, - newLL { - if (isEmpty) State.Empty - else tail.scanLeftState(op(z, 
head))(op) - } - ) - - /** LazyListIterable specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyListIterable`. - * @return The accumulated value from successive applications of `f`. - */ - override def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: LazyListIterable[A]^{this} = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { - val (left, right) = map(f).partition(_.isLeft) - (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.filterImpl(this, pred, isFlipped = false) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.filterImpl(this, pred, isFlipped = true) - - /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. - * - * This method is not particularly useful for a lazy list, as [[filter]] already preserves - * laziness. 
- * - * The `collection.WithFilter` returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = - new LazyListIterable.WithFilter(coll, p) - - /** @inheritdoc - * - * $preservesLaziness - */ - def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) - - /** @inheritdoc - * - * $preservesLaziness - */ - def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = - if (knownIsEmpty) LazyListIterable.from(prefix) - else if (prefix.knownSize == 0) this - else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def map[B](f: A => B): LazyListIterable[B]^{this, f} = - if (knownIsEmpty) LazyListIterable.empty - else (mapImpl(f): @inline) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } - - private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = - newLL { - if (isEmpty) State.Empty - else sCons(f(head), tail.mapImpl(f)) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.collectImpl(this, pf) - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element for which the partial function is defined. 
- */ - @tailrec - override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if (isEmpty) None - else { - val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) - if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) - else Some(res) - } - - /** @inheritdoc - * - * This method does not evaluate any elements further than - * the first element matching the predicate. - */ - @tailrec - override def find(p: A => Boolean): Option[A] = - if (isEmpty) None - else { - val elem = head - if (p(elem)) Some(elem) - else tail.find(p) - } - - /** @inheritdoc - * - * $preservesLaziness - */ - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.flatMapImpl(this, f) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = - if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty - else newLL(zipState(that.iterator)) - - private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = - if (this.isEmpty || !it.hasNext) State.Empty - else sCons((head, it.next()), newLL { tail zipState it }) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { - if (this.knownIsEmpty) { - if (that.knownSize == 0) 
LazyListIterable.empty - else LazyListIterable.continually(thisElem) zip that - } else { - if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) - else newLL(zipAllState(that.iterator, thisElem, thatElem)) - } - } - - private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { - if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) - else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) - } else { - if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) - } - } - - /** @inheritdoc - * - * This method is not particularly useful for a lazy list, as [[zip]] already preserves - * laziness. - * - * The `collection.LazyZip2` returned by this method preserves laziness; elements are - * only evaluated individually as needed. - */ - // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = - super.lazyZip(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = - (map(asPair(_)._1), map(asPair(_)._2)) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = - (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all except the first `n` elements. 
- */ - override def drop(n: Int): LazyListIterable[A]^{this} = - if (n <= 0) this - else if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.dropImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all elements after the predicate returns `false`. - */ - override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = - if (knownIsEmpty) LazyListIterable.empty - else LazyListIterable.dropWhileImpl(this, p) - - /** @inheritdoc - * - * $initiallyLazy - */ - override def dropRight(n: Int): LazyListIterable[A]^{this} = { - if (n <= 0) this - else if (knownIsEmpty) LazyListIterable.empty - else newLL { - var scout = this - var remaining = n - // advance scout n elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - remaining -= 1 - scout = scout.tail - } - dropRightState(scout) - } - } - - private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = - if (scout.isEmpty) State.Empty - else sCons(head, newLL(tail.dropRightState(scout.tail))) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def take(n: Int): LazyListIterable[A] = - if (knownIsEmpty) LazyListIterable.empty - else (takeImpl(n): @inline) - - private def takeImpl(n: Int): LazyListIterable[A] = { - if (n <= 0) LazyListIterable.empty - else newLL { - if (isEmpty) State.Empty - else sCons(head, tail.takeImpl(n - 1)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = - if (knownIsEmpty) LazyListIterable.empty - else (takeWhileImpl(p): @inline) - - private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = - newLL { - if (isEmpty || !p(head)) State.Empty - else sCons(head, tail.takeWhileImpl(p)) - } - - /** @inheritdoc - * - * $initiallyLazy - */ - override def takeRight(n: Int): LazyListIterable[A]^{this} = - if (n <= 0 || knownIsEmpty) LazyListIterable.empty - else 
LazyListIterable.takeRightImpl(this, n) - - /** @inheritdoc - * - * $initiallyLazy - * Additionally, it preserves laziness for all but the first `from` elements. - */ - override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) - - /** @inheritdoc - * - * $evaluatesAllElements - */ - def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) - - // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] - @tailrec - private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = - if (isEmpty) tl - else tail.reverseOnto(newLL(sCons(head, tl))) - - @tailrec - private def lengthGt(len: Int): Boolean = - if (len < 0) true - else if (isEmpty) false - else tail.lengthGt(len - 1) - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * a single element ahead of the iterator is evaluated. - */ - override def grouped(size: Int): Iterator[LazyListIterable[A]] = { - require(size > 0, "size must be positive, but was " + size) - slidingImpl(size = size, step = size) - } - - /** @inheritdoc - * - * The iterator returned by this method mostly preserves laziness; - * `size - step max 1` elements ahead of the iterator are evaluated. 
- */ - override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { - require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") - slidingImpl(size = size, step = step) - } - - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = - if (knownIsEmpty) Iterator.empty - else new SlidingIterator[A](this, size = size, step = step) - - /** @inheritdoc - * - * $preservesLaziness - */ - def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = { - if (len <= 0) this - else newLL { - if (isEmpty) LazyListIterable.fill(len)(elem).state - else sCons(head, tail.padTo(len - 1, elem)) - } - } - - /** @inheritdoc - * - * $preservesLaziness - */ - def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = - if (knownIsEmpty) LazyListIterable from other - else patchImpl(from, other, replaced) - - private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = - newLL { - if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state) - else if (isEmpty) stateFromIterator(other.iterator) - else sCons(head, tail.patchImpl(from - 1, other, replaced)) - } - - /** @inheritdoc - * - * $evaluatesAllElements - */ - // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose - - /** @inheritdoc - * - * $preservesLaziness - */ - def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} = - if (index < 0) throw new IndexOutOfBoundsException(s"$index") - else updatedImpl(index, elem, index) - - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = { - newLL { - if (index <= 0) sCons(elem, tail) - else if (tail.isEmpty) throw new 
IndexOutOfBoundsException(startIndex.toString) - else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) - } - } - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. - * - * $evaluatesAllElements - * - * @param sb the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. - * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { - force - addStringNoForce(sb.underlying, start, sep, end) - sb - } - - private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = { - b.append(start) - if (!stateDefined) b.append("") - else if (!isEmpty) { - b.append(head) - var cursor = this - inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) - var scout = tail - inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty - if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { - cursor = scout - if (scoutNonEmpty) { - scout = scout.tail - // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings - while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { - appendCursorElement() - cursor = cursor.tail - scout = scout.tail - if (scoutNonEmpty) scout = scout.tail - } - } - } - if (!scoutNonEmpty) { // Not a cycle, scout hit an end - while (cursor ne scout) { - appendCursorElement() - cursor = cursor.tail - } - // if cursor (eq scout) 
has state defined, it is empty; else unknown state - if (!cursor.stateDefined) b.append(sep).append("") - } else { - @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state) - // Cycle. - // If we have a prefix of length P followed by a cycle of length C, - // the scout will be at position (P%C) in the cycle when the cursor - // enters it at P. They'll then collide when the scout advances another - // C - (P%C) ahead of the cursor. - // If we run the scout P farther, then it will be at the start of - // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner - // starts at the beginning of the prefix, they'll collide exactly at - // the start of the loop. - var runner = this - var k = 0 - while (!same(runner, scout)) { - runner = runner.tail - scout = scout.tail - k += 1 - } - // Now runner and scout are at the beginning of the cycle. Advance - // cursor, adding to string, until it hits; then we'll have covered - // everything once. If cursor is already at beginning, we'd better - // advance one first unless runner didn't go anywhere (in which case - // we've already looped once). - if (same(cursor, scout) && (k > 0)) { - appendCursorElement() - cursor = cursor.tail - } - while (!same(cursor, scout)) { - appendCursorElement() - cursor = cursor.tail - } - b.append(sep).append("") - } - } - b.append(end) - } - - /** $preservesLaziness - * - * @return a string representation of this collection. An undefined state is - * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` - * - * Examples: - * - * - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains - * a cycle at the fourth element. 
- */ - override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString - - /** @inheritdoc - * - * $preservesLaziness - */ - @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") - override def hasDefiniteSize: Boolean = { - if (!stateDefined) false - else if (isEmpty) true - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. - var those = this - var these = tail - while (those ne these) { - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (!these.stateDefined) return false - else if (these.isEmpty) return true - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } -} - -/** - * $factoryInfo - * @define coll lazy list - * @define Coll `LazyListIterable` - */ -@SerialVersionUID(3L) -object LazyListIterable extends IterableFactory[LazyListIterable] { - // Eagerly evaluate cached empty instance - private[this] val _empty = newLL(State.Empty).force - - private sealed trait State[+A] extends Serializable { - this: State[A]^ => - def head: A - def tail: LazyListIterable[A]^ - } - - private object State { - @SerialVersionUID(3L) - object Empty extends State[Nothing] { - def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") - } - - @SerialVersionUID(3L) - final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] - } - - /** Creates a new LazyListIterable. */ - @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) - - /** Creates a new State.Cons. 
*/ - @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) - - private val anyToMarker: Any => Any = _ => Statics.pfMarker - - /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into - * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they - * can continue their execution where they left off. - */ - - private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var elem: A = null.asInstanceOf[A] - var found = false - var rest = restRef // var rest = restRef.elem - while (!found && !rest.isEmpty) { - elem = rest.head - found = p(elem) != isFlipped - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty - } - } - - private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - val marker = Statics.pfMarker - val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased - - var res: B = marker.asInstanceOf[B] // safe because B is unbounded - var rest = restRef // var rest = 
restRef.elem - while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { - res = pf.applyOrElse(rest.head, toMarker) - rest = rest.tail - restRef = rest // restRef.elem = rest - } - if (res.asInstanceOf[AnyRef] eq marker) State.Empty - else sCons(res, collectImpl(rest, pf)) - } - } - - private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var it: Iterator[B @uncheckedCaptures]^{ll, f} = null - var itHasNext = false - var rest = restRef // var rest = restRef.elem - while (!itHasNext && !rest.isEmpty) { - it = f(rest.head).iterator - itHasNext = it.hasNext - if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw - rest = rest.tail - restRef = rest // restRef.elem = rest - } - } - if (itHasNext) { - val head = it.next() - rest = rest.tail - restRef = rest // restRef.elem = rest - sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) - } else State.Empty - } - } - - private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var iRef = n // val iRef = new IntRef(n) - newLL { - var rest = restRef // var rest = restRef.elem - var i = iRef // var i = iRef.elem - while (i > 0 && !rest.isEmpty) { - rest = rest.tail - restRef = rest // restRef.elem = rest - i -= 1 - iRef = i // iRef.elem = i - } - rest.state - } - } - - private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL 
LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - newLL { - var rest = restRef // var rest = restRef.elem - while (!rest.isEmpty && p(rest.head)) { - rest = rest.tail - restRef = rest // restRef.elem = rest - } - rest.state - } - } - - private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { - // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric - var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation - var remainingRef = n // val remainingRef = new IntRef(n) - newLL { - var scout = scoutRef // var scout = scoutRef.elem - var remaining = remainingRef // var remaining = remainingRef.elem - // advance `scout` `n` elements ahead (or until empty) - while (remaining > 0 && !scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - remaining -= 1 - remainingRef = remaining // remainingRef.elem = remaining - } - var rest = restRef // var rest = restRef.elem - // advance `rest` and `scout` in tandem until `scout` reaches the end - while(!scout.isEmpty) { - scout = scout.tail - scoutRef = scout // scoutRef.elem = scout - rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail - restRef = rest // restRef.elem = rest - } - // `rest` is the last `n` elements (or all of them) - rest.state - } - } - - /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). 
- */ - object cons { - /** A lazy list consisting of a given first element and remaining elements - * @param hd The first element of the result lazy list - * @param tl The remaining elements of the result lazy list - */ - def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) - - /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) - } - - extension [A](l: => LazyListIterable[A]) - /** Construct a LazyListIterable consisting of a given first element followed by elements - * from another LazyListIterable. - */ - def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) - - /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and - * another LazyListIterable. - */ - def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l - - object #:: { - def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = - if (!s.isEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { - case lazyList: LazyListIterable[A] => lazyList - case _ if coll.knownSize == 0 => empty[A] - case _ => newLL(stateFromIterator(coll.iterator)) - } - - def empty[A]: LazyListIterable[A] = _empty - - /** Creates a State from an Iterator, with another State appended after the Iterator - * is empty. - */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = - if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) - else suffix - - /** Creates a State from an IterableOnce. 
*/ - private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = - if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) - else State.Empty - - override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = - if (xss.knownSize == 0) empty - else newLL(concatIterator(xss.iterator)) - - private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = - if (!it.hasNext) State.Empty - else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) - - /** An infinite LazyListIterable that repeatedly applies a given function to a start value. - * - * @param start the start value of the LazyListIterable - * @param f the function that's repeatedly applied - * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = - newLL { - val head = start - sCons(head, iterate(f(head))(f)) - } - - /** - * Create an infinite LazyListIterable starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the LazyListIterable - * @param step the increment value of the LazyListIterable - * @return the LazyListIterable starting at value `start`. - */ - def from(start: Int, step: Int): LazyListIterable[Int] = - newLL(sCons(start, from(start + step, step))) - - /** - * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. - * - * @param start the start value of the LazyListIterable - * @return the LazyListIterable starting at value `start`. - */ - def from(start: Int): LazyListIterable[Int] = from(start, 1) - - /** - * Create an infinite LazyListIterable containing the given element expression (which - * is computed for each occurrence). 
- * - * @param elem the element composing the resulting LazyListIterable - * @return the LazyListIterable containing an infinite number of elem - */ - def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) - - override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = - if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - - override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { - def at(index: Int): LazyListIterable[A]^{f} = - if (index < n) newLL(sCons(f(index), at(index + 1))) else empty - - at(0) - } - - // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = - newLL { - f(init) match { - case Some((elem, state)) => sCons(elem, unfold(state)(f)) - case None => State.Empty - } - } - - /** The builder returned by this method only evaluates elements - * of collections added to it as needed. - * - * @tparam A the type of the ${coll}’s elements - * @return A builder for $Coll objects. 
- */ - def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - - private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { - override def hasNext: Boolean = !lazyList.isEmpty - - override def next(): A = - if (lazyList.isEmpty) Iterator.empty.next() - else { - val res = lazyList.head - lazyList = lazyList.tail - res - } - } - - private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) - extends AbstractIterator[LazyListIterable[A]] { - this: SlidingIterator[A]^ => - private val minLen = size - step max 0 - private var first = true - - def hasNext: Boolean = - if (first) !lazyList.isEmpty - else lazyList.lengthGt(minLen) - - def next(): LazyListIterable[A] = { - if (!hasNext) Iterator.empty.next() - else { - first = false - val list = lazyList - lazyList = list.drop(step) - list.take(size) - } - } - } - - private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) - extends collection.WithFilter[A, LazyListIterable] { - this: WithFilter[A]^ => - private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) - } - - private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { - import LazyBuilder._ - - private[this] var next: DeferredState[A @uncheckedCaptures] = _ - private[this] var list: LazyListIterable[A @uncheckedCaptures] = _ - - clear() - - override def clear(): Unit = { - val deferred = new DeferredState[A] - list = newLL(deferred.eval()) - next = deferred - } - - override def result(): LazyListIterable[A] = { - next init State.Empty - list - } - - 
override def addOne(elem: A): this.type = { - val deferred = new DeferredState[A] - next init sCons(elem, newLL(deferred.eval())) - next = deferred - this - } - - // lazy implementation which doesn't evaluate the collection being added - override def addAll(xs: IterableOnce[A]^): this.type = { - if (xs.knownSize != 0) { - val deferred = new DeferredState[A] - next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) - next = deferred - } - this - } - } - - private object LazyBuilder { - final class DeferredState[A] { - this: DeferredState[A]^ => - private[this] var _state: (() => State[A]^) @uncheckedCaptures = _ - - def eval(): State[A]^ = { - val state = _state - if (state == null) throw new IllegalStateException("uninitialized") - state() - } - - // racy - def init(state: => State[A]^): Unit = { - if (_state != null) throw new IllegalStateException("already initialized") - _state = () => state - } - } - } - - /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization - * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
- */ - @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while (these.knownNonEmpty) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new mutable.ListBuffer[A @uncheckedCaptures] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[LazyListIterable[A]] - // scala/scala#10118: caution that no code path can evaluate `tail.state` - // before the resulting LazyListIterable is returned - val it = init.toList.iterator - coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) - } - - private[this] def readResolve(): Any = coll - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala deleted file mode 100644 index c5000d785144..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ListMap.scala +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import scala.collection.mutable.ReusableBuilder -import scala.collection.generic.DefaultSerializable -import scala.runtime.Statics.releaseFence -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** - * This class implements immutable maps using a list-based data structure. List map iterators and - * traversal methods visit key-value pairs in the order they were first inserted. - * - * Entries are stored internally in reversed insertion order, which means the newest key is at the - * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` - * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes - * this collection suitable only for a small number of elements. - * - * Instances of `ListMap` represent empty maps; they can be either created by calling the - * constructor directly, or by applying the function `ListMap.empty`. 
- * - * @tparam K the type of the keys contained in this list map - * @tparam V the type of the values associated with the keys - * - * @define Coll ListMap - * @define coll list map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class ListMap[K, +V] - extends AbstractMap[K, V] - with SeqMap[K, V] - with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] - with MapFactoryDefaults[K, V, ListMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[ListMap] = ListMap - - override def size: Int = 0 - - override def isEmpty: Boolean = true - - override def knownSize: Int = 0 - def get(key: K): Option[V] = None - - def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this) - - def removed(key: K): ListMap[K, V] = this - - def iterator: Iterator[(K, V)] = { - var curr: ListMap[K, V] = this - var res: List[(K, V)] = Nil - while (curr.nonEmpty) { - res = (curr.key, curr.value) :: res - curr = curr.next - } - res.iterator - } - - override def keys: Iterable[K] = { - var curr: ListMap[K, V] = this - var res: List[K] = Nil - while (curr.nonEmpty) { - res = curr.key :: res - curr = curr.next - } - res - } - - override def hashCode(): Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration - // order be reversing the list first. But mapHash is symmetric so the reversed order is fine here. 
- val _reversed = new immutable.AbstractMap[K, V] { - override def isEmpty: Boolean = ListMap.this.isEmpty - override def removed(key: K): Map[K, V] = ListMap.this.removed(key) - override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value) - override def get(key: K): Option[V] = ListMap.this.get(key) - override def iterator: Iterator[(K, V)] = ListMap.this.iterator - override def foreachEntry[U](f: (K, V) => U): Unit = { - var curr: ListMap[K, V] = ListMap.this - while (curr.nonEmpty) { - f(curr.key, curr.value) - curr = curr.next - } - } - } - MurmurHash3.mapHash(_reversed) - } - } - - private[immutable] def key: K = throw new NoSuchElementException("key of empty map") - private[immutable] def value: V = throw new NoSuchElementException("value of empty map") - private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map") - - override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op) - override protected[this] def className = "ListMap" - -} - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list map with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] - * section on `List Maps` for more information. - * @define Coll ListMap - * @define coll list map - */ -@SerialVersionUID(3L) -object ListMap extends MapFactory[ListMap] { - /** - * Represents an entry in the `ListMap`. 
- */ - private[immutable] final class Node[K, V]( - override private[immutable] val key: K, - private[immutable] var _value: V @uncheckedCaptures, - private[immutable] var _init: ListMap[K, V] @uncheckedCaptures - ) extends ListMap[K, V] { - releaseFence() - - override private[immutable] def value: V = _value - - override def size: Int = sizeInternal(this, 0) - - @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int = - if (cur.isEmpty) acc - else sizeInternal(cur.next, acc + 1) - - override def isEmpty: Boolean = false - - override def knownSize: Int = -1 - - @throws[NoSuchElementException] - override def apply(k: K): V = applyInternal(this, k) - - @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V = - if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) - else if (k == cur.key) cur.value - else applyInternal(cur.next, k) - - override def get(k: K): Option[V] = getInternal(this, k) - - @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] = - if (cur.isEmpty) None - else if (k == cur.key) Some(cur.value) - else getInternal(cur.next, k) - - override def contains(k: K): Boolean = containsInternal(this, k) - - @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean = - if (cur.isEmpty) false - else if (k == cur.key) true - else containsInternal(cur.next, k) - - override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = { - - var index = -1 // the index (in reverse) where the key to update exists, if it is found - var found = false // true if the key is found int he map - var isDifferent = false // true if the key was found and the values are different - - { - var curr: ListMap[K, V] = this - - while (curr.nonEmpty && !found) { - if (k == curr.key) { - found = true - isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef] - } - index += 1 - curr = curr.init - } - } - - if (found) { - if (isDifferent) { - var newHead: ListMap.Node[K, V1] = null - 
var prev: ListMap.Node[K, V1] = null - var curr: ListMap[K, V1] = this - var i = 0 - while (i < index) { - val temp = new ListMap.Node(curr.key, curr.value, null) - if (prev ne null) { - prev._init = temp - } - prev = temp - curr = curr.init - if (newHead eq null) { - newHead = prev - } - i += 1 - } - val newNode = new ListMap.Node(curr.key, v, curr.init) - if (prev ne null) { - prev._init = newNode - } - releaseFence() - if (newHead eq null) newNode else newHead - } else { - this - } - } else { - new ListMap.Node(k, v, this) - } - } - - @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] = - if (cur.isEmpty) acc.last - else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) } - else removeInternal(k, cur.next, cur :: acc) - - override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil) - - override private[immutable] def next: ListMap[K, V] = _init - - override def last: (K, V) = (key, value) - override def init: ListMap[K, V] = next - - } - - def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]] - - private object EmptyListMap extends ListMap[Any, Nothing] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] = - it match { - case lm: ListMap[K, V] => lm - case lhm: collection.mutable.LinkedHashMap[K, V] => - // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each - // key-value pair - var current: ListMap[K, V] = empty[K, V] - var firstEntry = lhm._firstEntry - while (firstEntry ne null) { - current = new Node(firstEntry.key, firstEntry.value, current) - firstEntry = firstEntry.later - } - current - case _: collection.Map[K, V] | _: collection.MapView[K, V] => - // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end - var current: ListMap[K, V] = empty[K, V] - val iter = it.iterator - while (iter.hasNext) { - val (k, v) = 
iter.next() - current = new Node(k, v, current) - } - current - - case _ => (newBuilder[K, V] ++= it).result() - } - - /** Returns a new ListMap builder - * - * The implementation safely handles additions after `result()` without calling `clear()` - * - * @tparam K the map key type - * @tparam V the map value type - */ - def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V] - - @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = { - if (map.isEmpty) prevValue - else foldRightInternal(map.init, op(map.last, prevValue), op) - } -} - -/** Builder for ListMap. - * $multipleResults - */ -private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { - private[this] var isAliased: Boolean = false - private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty - - override def clear(): Unit = { - underlying = ListMap.empty - isAliased = false - } - - override def result(): ListMap[K, V] = { - isAliased = true - releaseFence() - underlying - } - - override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) - - @tailrec - private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match { - case n: ListMap.Node[K, V] => - if (n.key == key) { - n._value = value - true - } else { - insertValueAtKeyReturnFound(n.init, key, value) - } - case _ => false - } - - def addOne(key: K, value: V): this.type = { - if (isAliased) { - underlying = underlying.updated(key, value) - } else { - if (!insertValueAtKeyReturnFound(underlying, key, value)) { - underlying = new ListMap.Node(key, value, underlying) - } - } - this - } - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - if (isAliased) { - super.addAll(xs) - } else if (underlying.nonEmpty) { - xs match { - case m: collection.Map[K, V] => - // if it is a map, then its keys will not collide with themselves. 
- // therefor we only need to check the already-existing elements for collisions. - // No need to check the entire list - - val iter = m.iterator - var newUnderlying = underlying - while (iter.hasNext) { - val next = iter.next() - if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) { - newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying) - } - } - underlying = newUnderlying - this - - case _ => - super.addAll(xs) - } - } else xs match { - case lhm: collection.mutable.LinkedHashMap[K, V] => - // special-casing LinkedHashMap avoids creating of Iterator and tuples for each key-value - var firstEntry = lhm._firstEntry - while (firstEntry ne null) { - underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying) - firstEntry = firstEntry.later - } - this - - case _: collection.Map[K, V] | _: collection.MapView[K, V] => - val iter = xs.iterator - while (iter.hasNext) { - val (k, v) = iter.next() - underlying = new ListMap.Node(k, v, underlying) - } - - this - case _ => - super.addAll(xs) - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala deleted file mode 100644 index 719abd78e1e6..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/ListSet.scala +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import mutable.{Builder, ImmutableBuilder} -import scala.annotation.tailrec -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** - * This class implements immutable sets using a list-based data structure. List set iterators and - * traversal methods visit elements in the order they were first inserted. - * - * Elements are stored internally in reversed insertion order, which means the newest element is at - * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and - * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which - * makes this collection suitable only for a small number of elements. - * - * Instances of `ListSet` represent empty sets; they can be either created by calling the - * constructor directly, or by applying the function `ListSet.empty`. 
- * - * @tparam A the type of the elements contained in this list set - * - * @define Coll ListSet - * @define coll list set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class ListSet[A] - extends AbstractSet[A] - with StrictOptimizedSetOps[A, ListSet, ListSet[A]] - with IterableFactoryDefaults[A, ListSet] - with DefaultSerializable { - - override protected[this] def className: String = "ListSet" - - override def size: Int = 0 - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - - def contains(elem: A): Boolean = false - - def incl(elem: A): ListSet[A] = new Node(elem) - def excl(elem: A): ListSet[A] = this - - def iterator: scala.collection.Iterator[A] = { - var curr: ListSet[A] = this - var res: List[A] = Nil - while (!curr.isEmpty) { - res = curr.elem :: res - curr = curr.next - } - res.iterator - } - - protected def elem: A = throw new NoSuchElementException("elem of empty set") - protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") - - override def iterableFactory: IterableFactory[ListSet] = ListSet - - /** - * Represents an entry in the `ListSet`. 
- */ - protected class Node(override protected val elem: A) extends ListSet[A] { - - override def size = sizeInternal(this, 0) - override def knownSize: Int = -1 - @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = - if (n.isEmpty) acc - else sizeInternal(n.next, acc + 1) - - override def isEmpty: Boolean = false - - override def contains(e: A): Boolean = containsInternal(this, e) - - @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = - !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) - - override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) - - override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) - - @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = - if (cur.isEmpty) acc.last - else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) - else removeInternal(k, cur.next, cur :: acc) - - override protected def next: ListSet[A] = ListSet.this - - override def last: A = elem - - override def init: ListSet[A] = next - } -} - -/** - * $factoryInfo - * - * Note that each element insertion takes O(n) time, which means that creating a list set with - * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of - * elements. 
- * - * @define Coll ListSet - * @define coll list set - */ -@SerialVersionUID(3L) -object ListSet extends IterableFactory[ListSet] { - - def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = - it match { - case ls: ListSet[E] => ls - case _ if it.knownSize == 0 => empty[E] - case _ => (newBuilder[E] ++= it).result() - } - - private object EmptyListSet extends ListSet[Any] { - override def knownSize: Int = 0 - } - private[collection] def emptyInstance: ListSet[Any] = EmptyListSet - - def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] - - def newBuilder[A]: Builder[A, ListSet[A]] = - new ImmutableBuilder[A, ListSet[A]](empty) { - def addOne(elem: A): this.type = { elems = elems + elem; this } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala deleted file mode 100644 index 4abf433273f2..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/LongMap.scala +++ /dev/null @@ -1,492 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import java.lang.IllegalStateException - -import scala.collection.generic.{BitOperations, DefaultSerializationProxy} -import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.language.implicitConversions -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Utility class for long maps. 
- */ -private[immutable] object LongMapUtils extends BitOperations.Long { - def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) - - def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) - else LongMap.Bin(p, m, t2, t1) - } - - def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { - case (left, LongMap.Nil) => left - case (LongMap.Nil, right) => right - case (left, right) => LongMap.Bin(prefix, mask, left, right) - } -} - -import LongMapUtils._ - -/** A companion object for long maps. - * - * @define Coll `LongMap` - */ -object LongMap { - def empty[T]: LongMap[T] = LongMap.Nil - def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) - def apply[T](elems: (Long, T)*): LongMap[T] = - elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - - def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] = - newBuilder[V].addAll(coll).result() - - def newBuilder[V]: Builder[(Long, V), LongMap[V]] = - new ImmutableBuilder[(Long, V), LongMap[V]](empty) { - def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } - } - - private[immutable] case object Nil extends LongMap[Nothing] { - // Important, don't remove this! See IntMap for explanation. 
- override def equals(that : Any) = that match { - case _: this.type => true - case _: LongMap[_] => false // The only empty LongMaps are eq Nil - case _ => super.equals(that) - } - } - - private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] - else LongMap.Tip(key, s) - } - - private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { - def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] - else LongMap.Bin[S](prefix, mask, left, right) - } - } - - implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) - def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] - } - - implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] - private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) - def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] - } - - implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) - implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) -} - -// Iterator over a non-empty LongMap. 
-private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { - - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and - // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 65 - var index = 0 - var buffer = new Array[AnyRef](65) - - def pop() = { - index -= 1 - buffer(index).asInstanceOf[LongMap[V]] - } - - def push(x: LongMap[V]): Unit = { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - push(it) - - /** - * What value do we assign to a tip? - */ - def valueOf(tip: LongMap.Tip[V]): T - - def hasNext = index != 0 - @tailrec - final def next(): T = - pop() match { - case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => { - push(right) - valueOf(t) - } - case LongMap.Bin(_, _, left, right) => { - push(right) - push(left) - next() - } - case t@LongMap.Tip(_, _) => valueOf(t) - // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap - // and don't return an LongMapIterator for LongMap.Nil. - case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") - } -} - -private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ - def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) -} - -private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ - def valueOf(tip: LongMap.Tip[V]) = tip.value -} - -private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ - def valueOf(tip: LongMap.Tip[V]) = tip.key -} - -/** - * Specialised immutable map structure for long keys, based on - * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] - * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. 
- * - * Note: This class is as of 2.8 largely superseded by HashMap. - * - * @tparam T type of the values associated with the long keys. - * - * @define Coll `immutable.LongMap` - * @define coll immutable long integer map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed abstract class LongMap[+T] extends AbstractMap[Long, T] - with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] - with Serializable { - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = { - //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? - val b = newSpecificBuilder - b.sizeHint(coll) - b.addAll(coll) - b.result() - } - override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance = - new ImmutableBuilder[(Long, T), LongMap[T]](empty) { - def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this } - } - - override def empty: LongMap[T] = LongMap.Nil - - override def toList = { - val buffer = new ListBuffer[(Long, T) @uncheckedCaptures] - foreach(buffer += _) - buffer.toList - } - - /** - * Iterator over key, value pairs of the map in unsigned order of the keys. - * - * @return an iterator over pairs of long keys and corresponding values. - */ - def iterator: Iterator[(Long, T)] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapEntryIterator(this) - } - - /** - * Loops over the key, value pairs of the map in unsigned order of the keys. 
- */ - override final def foreach[U](f: ((Long, T)) => U): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } - case LongMap.Tip(key, value) => f((key, value)) - case LongMap.Nil => - } - - override final def foreachEntry[U](f: (Long, T) => U): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } - case LongMap.Tip(key, value) => f(key, value) - case LongMap.Nil => - } - - override def keysIterator: Iterator[Long] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapKeyIterator(this) - } - - /** - * Loop over the keys of the map. The same as keys.foreach(f), but may - * be more efficient. - * - * @param f The loop body - */ - final def foreachKey[U](f: Long => U): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } - case LongMap.Tip(key, _) => f(key) - case LongMap.Nil => - } - - override def valuesIterator: Iterator[T] = this match { - case LongMap.Nil => Iterator.empty - case _ => new LongMapValueIterator(this) - } - - /** - * Loop over the values of the map. The same as values.foreach(f), but may - * be more efficient. 
- * - * @param f The loop body - */ - final def foreachValue[U](f: T => U): Unit = this match { - case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } - case LongMap.Tip(_, value) => f(value) - case LongMap.Nil => - } - - override protected[this] def className = "LongMap" - - override def isEmpty = this eq LongMap.Nil - override def knownSize: Int = if (isEmpty) 0 else super.knownSize - override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => { - val (newleft, newright) = (left.filter(f), right.filter(f)) - if ((left eq newleft) && (right eq newright)) this - else bin(prefix, mask, newleft, newright) - } - case LongMap.Tip(key, value) => - if (f((key, value))) this - else LongMap.Nil - case LongMap.Nil => LongMap.Nil - } - - override def transform[S](f: (Long, T) => S): LongMap[S] = this match { - case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) - case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) - case LongMap.Nil => LongMap.Nil - } - - final override def size: Int = this match { - case LongMap.Nil => 0 - case LongMap.Tip(_, _) => 1 - case LongMap.Bin(_, _, left, right) => left.size + right.size - } - - @tailrec - final def get(key: Long): Option[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) - case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None - case LongMap.Nil => None - } - - @tailrec - final override def getOrElse[S >: T](key: Long, default: => S): S = this match { - case LongMap.Nil => default - case LongMap.Tip(key2, value) => if (key == key2) value else default - case LongMap.Bin(prefix, mask, left, right) => - if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) - } - - @tailrec - final override def apply(key: Long): T = this match { - case LongMap.Bin(prefix, mask, left, 
right) => if (zero(key, mask)) left(key) else right(key) - case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") - case LongMap.Nil => throw new IllegalArgumentException("key not found") - } - - override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) - - override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) - else LongMap.Bin(prefix, mask, left, right.updated(key, value)) - case LongMap.Tip(key2, value2) => - if (key == key2) LongMap.Tip(key, value) - else join(key, LongMap.Tip(key, value), key2, this) - case LongMap.Nil => LongMap.Tip(key, value) - } - - /** - * Updates the map, using the provided function to resolve conflicts if the key is already present. - * - * Equivalent to - * {{{ - * this.get(key) match { - * case None => this.update(key, value) - * case Some(oldvalue) => this.update(key, f(oldvalue, value) - * } - * }}} - * - * @tparam S The supertype of values in this `LongMap`. - * @param key The key to update. - * @param value The value to use if there is no conflict. - * @param f The function used to resolve conflicts. - * @return The updated map. 
- */ - def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) - else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) - else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) - case LongMap.Tip(key2, value2) => - if (key == key2) LongMap.Tip(key, f(value2, value)) - else join(key, LongMap.Tip(key, value), key2, this) - case LongMap.Nil => LongMap.Tip(key, value) - } - - def removed(key: Long): LongMap[T] = this match { - case LongMap.Bin(prefix, mask, left, right) => - if (!hasMatch(key, prefix, mask)) this - else if (zero(key, mask)) bin(prefix, mask, left - key, right) - else bin(prefix, mask, left, right - key) - case LongMap.Tip(key2, _) => - if (key == key2) LongMap.Nil - else this - case LongMap.Nil => LongMap.Nil - } - - /** - * A combined transform and filter function. Returns an `LongMap` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. 
- */ - def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { - case LongMap.Bin(prefix, mask, left, right) => { - val newleft = left.modifyOrRemove(f) - val newright = right.modifyOrRemove(f) - if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] - else bin(prefix, mask, newleft, newright) - } - case LongMap.Tip(key, value) => f(key, value) match { - case None => LongMap.Nil - case Some(value2) => - //hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] - else LongMap.Tip(key, value2) - } - case LongMap.Nil => LongMap.Nil - } - - /** - * Forms a union map with that map, using the combining function to resolve conflicts. - * - * @tparam S The type of values in `that`, a supertype of values in `this`. - * @param that The map to form a union with. - * @param f The function used to resolve conflicts between two mappings. - * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
- */ - def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ - case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) - else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) - else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) - } else if (shorter(m2, m1)){ - if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) - else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) - else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) - } - else { - if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) - else join(p1, this, p2, that) - } - case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x)) - case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) - case (LongMap.Nil, x) => x - case (x, LongMap.Nil) => x - } - - /** - * Forms the intersection of these two maps with a combining function. The - * resulting map is a map that has only keys present in both maps and has - * values produced from the original mappings by combining them with `f`. - * - * @tparam S The type of values in `that`. - * @tparam R The type of values in the resulting `LongMap`. - * @param that The map to intersect with. - * @param f The combining function. - * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. 
- */ - def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { - case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => - if (shorter(m1, m2)) { - if (!hasMatch(p2, p1, m1)) LongMap.Nil - else if (zero(p2, m1)) l1.intersectionWith(that, f) - else r1.intersectionWith(that, f) - } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) - else { - if (!hasMatch(p1, p2, m2)) LongMap.Nil - else if (zero(p1, m2)) this.intersectionWith(l2, f) - else this.intersectionWith(r2, f) - } - case (LongMap.Tip(key, value), that) => that.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value, value2)) - } - case (_, LongMap.Tip(key, value)) => this.get(key) match { - case None => LongMap.Nil - case Some(value2) => LongMap.Tip(key, f(key, value2, value)) - } - case (_, _) => LongMap.Nil - } - - /** - * Left biased intersection. Returns the map that has all the same mappings as this but only for keys - * which are present in the other map. - * - * @tparam R The type of values in `that`. - * @param that The map to intersect with. - * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
- */ - def intersection[R](that: LongMap[R]): LongMap[T] = - this.intersectionWith(that, (key: Long, value: T, value2: R) => value) - - def ++[S >: T](that: LongMap[S]) = - this.unionWith[S](that, (key, x, y) => y) - - @tailrec - final def firstKey: Long = this match { - case LongMap.Bin(_, _, l, r) => l.firstKey - case LongMap.Tip(k, v) => k - case LongMap.Nil => throw new IllegalStateException("Empty set") - } - - @tailrec - final def lastKey: Long = this match { - case LongMap.Bin(_, _, l, r) => r.lastKey - case LongMap.Tip(k , v) => k - case LongMap.Nil => throw new IllegalStateException("Empty set") - } - - def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - - def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - - override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = - super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such - - override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) - - def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = - strictOptimizedCollect(LongMap.newBuilder[V2], pf) - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) -} diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala deleted file mode 100644 index 6daad829bf55..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Map.scala +++ /dev/null @@ -1,694 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.Map.Map4 -import scala.collection.mutable.{Builder, ReusableBuilder} -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** Base type of immutable Maps */ -trait Map[K, +V] - extends Iterable[(K, V)] - with collection.Map[K, V] - with MapOps[K, V, Map, Map[K, V]] - with MapFactoryDefaults[K, V, Map, Iterable] { - - override def mapFactory: scala.collection.MapFactory[Map] = Map - - override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]] - - /** The same map with a given default function. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d) -} - -/** Base trait of immutable Maps implementations - * - * @define coll immutable map - * @define Coll `immutable.Map` - */ -trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] - extends IterableOps[(K, V), Iterable, C] - with collection.MapOps[K, V, CC, C] { - - protected def coll: C with CC[K, V] - - /** Removes a key from this map, returning a new map. - * - * @param key the key to be removed - * @return a new map without a binding for ''key'' - */ - def removed(key: K): C - - /** Alias for `removed` */ - @`inline` final def - (key: K): C = removed(key) - - @deprecated("Use -- with an explicit collection", "2.13.0") - def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys) - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * $willForceEvaluation - * - * @param keys the collection containing the removed elements. - * @return a new $coll that contains all elements of the current $coll - * except one less occurrence of each of the elements of `elems`. - */ - def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _) - - /** Alias for `removedAll` */ - @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys) - - /** Creates a new map obtained by updating this map with a given key/value pair. - * @param key the key - * @param value the value - * @tparam V1 the type of the added value - * @return A new map with the new key/value mapping added to this map. - */ - def updated[V1 >: V](key: K, value: V1): CC[K, V1] - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). 
- * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). - * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * @param key the key value - * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping - * @return A new map with the updated mapping with the key - */ - def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { - val previousValue = this.get(key) - remappingFunction(previousValue) match { - case None => previousValue.fold(coll)(_ => this.removed(key).coll) - case Some(nextValue) => - if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll - else coll.updated(key, nextValue) - } - } - - /** - * Alias for `updated` - * - * @param kv the key/value pair. - * @tparam V1 the type of the value in the key/value pair. - * @return A new map with the new binding added to this map. - */ - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) - - /** This function transforms all the values of mappings contained - * in this map with function `f`. 
- * - * @param f A function over keys and values - * @return the updated map - */ - def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } - - override def keySet: Set[K] = new ImmutableKeySet - - /** The implementation class of the set returned by `keySet` */ - protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { - def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem - def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this - } - -} - -trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] - extends MapOps[K, V, CC, C] - with collection.StrictOptimizedMapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] { - - override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { - var result: CC[K, V1] = coll - val it = that.iterator - while (it.hasNext) result = result + it.next() - result - } -} - - -/** - * $factoryInfo - * @define coll immutable map - * @define Coll `immutable.Map` - */ -@SerialVersionUID(3L) -object Map extends MapFactory[Map] { - - @SerialVersionUID(3L) - class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) - extends AbstractMap[K, V] - with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { - - def get(key: K): Option[V] = underlying.get(key) - - override def default(key: K): V = defaultValue(key) - - override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory - - def iterator: Iterator[(K, V)] = underlying.iterator - - override def isEmpty: Boolean = underlying.isEmpty - - override def mapFactory: MapFactory[Map] = underlying.mapFactory - - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = - new WithDefault(underlying.concat(xs), defaultValue) - - def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) - - def 
updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = - new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = - new WithDefault[K, V](mapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = - Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) - } - - def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = - it match { - case it: Iterable[_] if it.isEmpty => empty[K, V] - case m: Map[K, V] => m - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl - - @SerialVersionUID(3L) - private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { - override def size: Int = 0 - override def knownSize: Int = 0 - override def isEmpty: Boolean = true - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - def get(key: Any): Option[Nothing] = None - override def getOrElse [V1](key: Any, default: => V1): V1 = default - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - override def keysIterator: Iterator[Any] = Iterator.empty - override def valuesIterator: Iterator[Nothing] = Iterator.empty - def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) - def removed(key: Any): Map[Any, Nothing] = this - override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { - case m: immutable.Map[Any, V2] => m - case _ => super.concat(suffix) - } - } - - @SerialVersionUID(3L) - final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] 
with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 1 - override def knownSize: Int = 1 - override def isEmpty: Boolean = false - override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = key == key1 - def get(key: K): Option[V] = - if (key == key1) Some(value1) else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 else default - def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) - override def keysIterator: Iterator[K] = Iterator.single(key1) - override def valuesIterator: Iterator[V] = Iterator.single(value1) - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map1(key1, value) - else new Map2(key1, value1, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) Map.empty else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = - if (pred((key1, value1)) != isFlipped) this else Map.empty - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] - else new Map1(key1, walue1) - } - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 1 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - final class Map2[K, +V](key1: K, value1: V, key2: K, 
value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 2 - override def knownSize: Int = 2 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else default - def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map2Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map2Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 2 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map2(key1, value, key2, value2) - else if (key == key2) new Map2(key1, value1, key2, value) - else new Map3(key1, value1, key2, value2, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) new Map1(key2, value2) - else if (key == key2) new Map1(key1, value1) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } 
- override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1 = null.asInstanceOf[K] - var v1 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, v1) - case 2 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = f(key2, value2) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map2(key1, walue1, key2, walue2) - } - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 2 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - override def size: Int = 3 - override def knownSize: Int = 3 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) - def 
get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else default - def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map3Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map3Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 3 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case 2 => nextResult(key3, value3) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map3(key1, value, key2, value2, key3, value3) - else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) - else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) - else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def removed(key: K): Map[K, V] = - if (key == key1) new Map2(key2, value2, key3, value3) - else if (key == key2) new Map2(key1, value1, key3, value3) - else if (key == key3) new Map2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) - override 
def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1, k2 = null.asInstanceOf[K] - var v1, v2 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} - if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, v1) - case 2 => new Map2(k1, v1, k2, v2) - case 3 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = f(key2, value2) - val walue3 = f(key3, value3) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && - (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map3(key1, walue1, key2, walue2, key3, walue3) - } - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 3 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key3, value3) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } - - @SerialVersionUID(3L) - final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) - extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { - - override def size: Int = 4 - override def 
knownSize: Int = 4 - override def isEmpty: Boolean = false - override def apply(key: K): V = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else default - def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { - override protected def nextResult(k: K, v: V): (K, V) = (k, v) - } - override def keysIterator: Iterator[K] = new Map4Iterator[K] { - override protected def nextResult(k: K, v: V): K = k - } - override def valuesIterator: Iterator[V] = new Map4Iterator[V] { - override protected def nextResult(k: K, v: V): V = v - } - - private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { - private[this] var i = 0 - override def hasNext: Boolean = i < 4 - override def next(): A = { - val result = i match { - case 0 => nextResult(key1, value1) - case 1 => nextResult(key2, value2) - case 2 => nextResult(key3, value3) - case 3 => nextResult(key4, value4) - case _ => Iterator.empty.next() - } - i += 1 - result - } - override def drop(n: Int): Iterator[A] = { i += n; this } - protected def nextResult(k: K, v: V @uncheckedVariance): A - } - def updated[V1 >: V](key: K, value: V1): Map[K, V1] = - if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) - 
else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) - else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) - def removed(key: K): Map[K, V] = - if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) - override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) - override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { - var k1, k2, k3 = null.asInstanceOf[K] - var v1, v2, v3 = null.asInstanceOf[V] - var n = 0 - if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} - if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} - if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1} - if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} - - n match { - case 0 => Map.empty - case 1 => new Map1(k1, v1) - case 2 => new Map2(k1, v1, k2, v2) - case 3 => new Map3(k1, v1, k2, v2, k3, v3) - case 4 => this - } - } - override def transform[W](f: (K, V) => W): Map[K, W] = { - val walue1 = f(key1, value1) - val walue2 = 
f(key2, value2) - val walue3 = f(key3, value3) - val walue4 = f(key4, value4) - if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && - (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && - (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && - (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] - else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) - } - private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = - builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) - override def hashCode(): Int = { - import scala.util.hashing.MurmurHash3 - var a, b = 0 - val N = 4 - var c = 1 - - var h = MurmurHash3.tuple2Hash(key1, value1) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key2, value2) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key3, value3) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.tuple2Hash(key4, value4) - a += h - b ^= h - c *= h | 1 - - h = MurmurHash3.mapSeed - h = MurmurHash3.mix(h, a) - h = MurmurHash3.mix(h, b) - h = MurmurHash3.mixLast(h, c) - MurmurHash3.finalizeHash(h, N) - } - } -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] - -private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { - private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty - private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _ - - private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = - if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) - else elems.getOrElse(key, value) - - override def clear(): Unit = { - elems = Map.empty - if (hashMapBuilder != null) { - hashMapBuilder.clear() - } - switchedToHashMapBuilder = false - } - - override def result(): Map[K, V] = - if (switchedToHashMapBuilder) hashMapBuilder.result() else elems - - def addOne(key: K, value: V): this.type = { - if (switchedToHashMapBuilder) { - hashMapBuilder.addOne(key, value) - } else if (elems.size < 4) { - elems = elems.updated(key, value) - } else { - // assert(elems.size == 4) - if (elems.contains(key)) { - elems = elems.updated(key, value) - } else { - switchedToHashMapBuilder = true - if (hashMapBuilder == null) { - hashMapBuilder = new HashMapBuilder - } - elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) - hashMapBuilder.addOne(key, value) - } - } - - this - } - - def addOne(elem: (K, V)) = addOne(elem._1, elem._2) - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = - if (switchedToHashMapBuilder) { - hashMapBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala deleted file mode 100644 index f26d9728e5ad..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala +++ /dev/null @@ -1,509 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** `NumericRange` is a more generic version of the - * `Range` class which works with arbitrary types. - * It must be supplied with an `Integral` implementation of the - * range type. - * - * Factories for likely types include `Range.BigInt`, `Range.Long`, - * and `Range.BigDecimal`. `Range.Int` exists for completeness, but - * the `Int`-based `scala.Range` should be more performant. - * - * {{{ - * val r1 = Range(0, 100, 1) - * val veryBig = Int.MaxValue.toLong + 1 - * val r2 = Range.Long(veryBig, veryBig + 100, 1) - * assert(r1 sameElements r2.map(_ - veryBig)) - * }}} - * - * @define Coll `NumericRange` - * @define coll numeric range - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -sealed class NumericRange[T]( - val start: T, - val end: T, - val step: T, - val isInclusive: Boolean -)(implicit - num: Integral[T] -) - extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with IterableFactoryDefaults[T, IndexedSeq] - with Serializable { self => - - override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { - import scala.collection.convert._ - import impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 
0, length) - case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length) - case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - } - s.asInstanceOf[S with EfficientSplit] - } - - - /** Note that NumericRange must be invariant so that constructs - * such as "1L to 10 by 5" do not infer the range type as AnyVal. - */ - import num._ - - // See comment in Range for why this must be lazy. - override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) - override lazy val isEmpty: Boolean = ( - (num.gt(start, end) && num.gt(step, num.zero)) - || (num.lt(start, end) && num.lt(step, num.zero)) - || (num.equiv(start, end) && !isInclusive) - ) - override def last: T = - if (isEmpty) Nil.head - else locationAfterN(length - 1) - override def init: NumericRange[T] = - if (isEmpty) Nil.init - else new NumericRange(start, end - step, step, isInclusive) - - override def head: T = if (isEmpty) Nil.head else start - override def tail: NumericRange[T] = - if (isEmpty) Nil.tail - else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) - else new NumericRange.Exclusive(start + step, end, step) - - /** Create a new range with the start and end values of this range and - * a new `step`. - */ - def by(newStep: T): NumericRange[T] = copy(start, end, newStep) - - - /** Create a copy of this range. 
- */ - def copy(start: T, end: T, step: T): NumericRange[T] = - new NumericRange(start, end, step, isInclusive) - - @throws[IndexOutOfBoundsException] - def apply(idx: Int): T = { - if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") - else locationAfterN(idx) - } - - override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { - var count = 0 - var current = start - while (count < length) { - f(current) - current += step - count += 1 - } - } - - // TODO: these private methods are straight copies from Range, duplicated - // to guard against any (most likely illusory) performance drop. They should - // be eliminated one way or another. - - // Tests whether a number is within the endpoints, without testing - // whether it is a member of the sequence (i.e. when step > 1.) - private def isWithinBoundaries(elem: T) = !isEmpty && ( - (step > zero && start <= elem && elem <= last ) || - (step < zero && last <= elem && elem <= start) - ) - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private def locationAfterN(n: Int): T = start + (step * fromInt(n)) - - private def crossesTheEndAfterN(n: Int): Boolean = { - // if we're sure that subtraction in the context of T won't overflow, we use this function - // to calculate the length of the range - def unsafeRangeLength(r: NumericRange[T]): T = { - val diff = num.minus(r.end, r.start) - val quotient = num.quot(diff, r.step) - val remainder = num.rem(diff, r.step) - if (!r.isInclusive && num.equiv(remainder, num.zero)) - num.max(quotient, num.zero) - else - num.max(num.plus(quotient, num.one), num.zero) - } - - // detects whether value can survive a bidirectional trip to -and then from- Int. 
- def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) - - val stepIsInTheSameDirectionAsStartToEndVector = - (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) - - if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 - - val sameSign = num.equiv(num.sign(start), num.sign(end)) - - if (sameSign) { // subtraction is safe - val len = unsafeRangeLength(this) - if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) - } else { - // split to two ranges, which subtraction is safe in both of them (around zero) - val stepsRemainderToZero = num.rem(start, step) - val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) - val closestToZero = if (walksOnZero) -step else stepsRemainderToZero - - /* - When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, - so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). - Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. - After performing such operation, there are some elements remaining in between and around zero, - which their length is represented by carry. 
- */ - val (l: NumericRange[T], r: NumericRange[T], carry: Int) = - if (num.lt(start, num.zero)) { - if (walksOnZero) { - val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) - (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) - } else { - (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) - } - } else { - if (walksOnZero) { - val twoStepsAfterZero = num.times(step, num.fromInt(2)) - (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) - } else { - val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) - (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) - } - } - - val leftLength = unsafeRangeLength(l) - val rightLength = unsafeRangeLength(r) - - // instead of `n >= rightLength + leftLength + curry` which may cause addition overflow, - // this can be used `(n - leftLength - curry) >= rightLength` (Both in Int and T, depends on whether the lengths fit in Int) - if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) - n - num.toInt(leftLength) - carry >= num.toInt(rightLength) - else - num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) - } - } - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. 
- private def newEmptyRange(value: T) = NumericRange(value, value, step) - - override def take(n: Int): NumericRange[T] = { - if (n <= 0 || isEmpty) newEmptyRange(start) - else if (crossesTheEndAfterN(n)) this - else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) - } - - override def drop(n: Int): NumericRange[T] = { - if (n <= 0 || isEmpty) this - else if (crossesTheEndAfterN(n)) newEmptyRange(end) - else copy(locationAfterN(n), end, step) - } - - override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) - - override def reverse: NumericRange[T] = - if (isEmpty) this - else { - val newStep = -step - if (num.sign(newStep) == num.sign(step)) { - throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") - } else new NumericRange.Inclusive(last, start, newStep) - } - - import NumericRange.defaultOrdering - - override def min[T1 >: T](implicit ord: Ordering[T1]): T = - // We can take the fast path: - // - If the Integral of this NumericRange is also the requested Ordering - // (Integral <: Ordering). This can happen for custom Integral types. - // - The Ordering is the default Ordering of a well-known Integral type. - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.sign(step) > zero) head - else last - } else super.min(ord) - - override def max[T1 >: T](implicit ord: Ordering[T1]): T = - // See comment for fast path in min(). - if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { - if (num.sign(step) > zero) last - else head - } else super.max(ord) - - // a well-typed contains method. 
- def containsTyped(x: T): Boolean = - isWithinBoundaries(x) && (((x - start) % step) == zero) - - override def contains[A1 >: T](x: A1): Boolean = - try containsTyped(x.asInstanceOf[T]) - catch { case _: ClassCastException => false } - - override def sum[B >: T](implicit num: Numeric[B]): B = { - if (isEmpty) num.zero - else if (size == 1) head - else { - // If there is no overflow, use arithmetic series formula - // a + ... (n terms total) ... + b = n*(a+b)/2 - if ((num eq scala.math.Numeric.IntIsIntegral)|| - (num eq scala.math.Numeric.ShortIsIntegral)|| - (num eq scala.math.Numeric.ByteIsIntegral)|| - (num eq scala.math.Numeric.CharIsIntegral)) { - // We can do math with no overflow in a Long--easy - val exact = (size * ((num toLong head) + (num toInt last))) / 2 - num fromInt exact.toInt - } - else if (num eq scala.math.Numeric.LongIsIntegral) { - // Uh-oh, might be overflow, so we have to divide before we overflow. - // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying - val a = head.toLong - val b = last.toLong - val ans = - if ((size & 1) == 0) (size / 2) * (a + b) - else size * { - // Sum is even, but we might overflow it, so divide in pieces and add back remainder - val ha = a/2 - val hb = b/2 - ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 - } - ans.asInstanceOf[B] - } - else if ((num eq scala.math.Numeric.BigIntIsIntegral) || - (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { - // No overflow, so we can use arithmetic series formula directly - // (not going to worry about running out of memory) - val numAsIntegral = num.asInstanceOf[Integral[B]] - import numAsIntegral._ - ((num fromInt size) * (head + last)) / (num fromInt 2) - } - else { - // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. 
won't work on something like Z_6) - if (isEmpty) num.zero - else { - var acc = num.zero - var i = head - var idx = 0 - while(idx < length) { - acc = num.plus(acc, i) - i = i + step - idx = idx + 1 - } - acc - } - } - } - } - - override lazy val hashCode: Int = super.hashCode() - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - override def equals(other: Any): Boolean = other match { - case x: NumericRange[_] => - (x canEqual this) && (length == x.length) && ( - (isEmpty) || // all empty sequences are equal - (start == x.start && last == x.last) // same length and same endpoints implies equality - ) - case _ => - super.equals(other) - } - - override def toString: String = { - val empty = if (isEmpty) "empty " else "" - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - s"${empty}NumericRange $start $preposition $end$stepped" - } - - override protected[this] def className = "NumericRange" -} - -/** A companion object for numeric ranges. - * @define Coll `NumericRange` - * @define coll numeric range - */ -object NumericRange { - private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { - def FAIL(boundary: T, step: T): Unit = { - val msg = boundary match { - case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" - case _ => "Precision" - } - throw new IllegalArgumentException( - s"$msg inadequate to represent steps of size $step near $boundary" - ) - } - if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) - if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) - } - - /** Calculates the number of elements in a range given start, end, step, and - * whether or not it is inclusive. Throws an exception if step == 0 or - * the number of elements exceeds the maximum Int. 
- */ - def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { - val zero = num.zero - val upward = num.lt(start, end) - val posStep = num.gt(step, zero) - - if (step == zero) throw new IllegalArgumentException("step cannot be 0.") - else if (start == end) if (isInclusive) 1 else 0 - else if (upward != posStep) 0 - else { - /* We have to be frightfully paranoid about running out of range. - * We also can't assume that the numbers will fit in a Long. - * We will assume that if a > 0, -a can be represented, and if - * a < 0, -a+1 can be represented. We also assume that if we - * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). - * And we assume that numbers wrap rather than cap when they overflow. - */ - // Check whether we can short-circuit by deferring to Int range. - val startint = num.toInt(start) - if (start == num.fromInt(startint)) { - val endint = num.toInt(end) - if (end == num.fromInt(endint)) { - val stepint = num.toInt(step) - if (step == num.fromInt(stepint)) { - return { - if (isInclusive) Range.inclusive(startint, endint, stepint).length - else Range (startint, endint, stepint).length - } - } - } - } - // If we reach this point, deferring to Int failed. - // Numbers may be big. - if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { - bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) - } - val one = num.one - val limit = num.fromInt(Int.MaxValue) - def check(t: T): T = - if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") - else t - // If the range crosses zero, it might overflow when subtracted - val startside = num.sign(start) - val endside = num.sign(end) - num.toInt{ - if (num.gteq(num.times(startside, endside), zero)) { - // We're sure we can subtract these numbers. 
- // Note that we do not use .rem because of different conventions for Long and BigInt - val diff = num.minus(end, start) - val quotient = check(num.quot(diff, step)) - val remainder = num.minus(diff, num.times(quotient, step)) - if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) - } - else { - // We might not even be able to subtract these numbers. - // Jump in three pieces: - // * start to -1 or 1, whichever is closer (waypointA) - // * one step, which will take us at least to 0 (ends at waypointB) - // * (except with really small numbers) - // * there to the end - val negone = num.fromInt(-1) - val startlim = if (posStep) negone else one - //Use start value if the start value is closer to zero than startlim - // * e.g. .5 is closer to zero than 1 and -.5 is closer to zero than -1 - val startdiff = { - if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) - start - else - num.minus(startlim, start) - } - val startq = check(num.quot(startdiff, step)) - val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) - val waypointB = num.plus(waypointA, step) - check { - if (num.lt(waypointB, end) != upward) { - // No last piece - if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) - else num.plus(startq, one) - } - else { - // There is a last piece - val enddiff = num.minus(end,waypointB) - val endq = check(num.quot(enddiff, step)) - val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) - // Now we have to tally up all the pieces - // 1 for the initial value - // startq steps to waypointA - // 1 step to waypointB - // endq steps to the end (one less if !isInclusive and last==end) - num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) - } - } - } - } - } - } - - @SerialVersionUID(3L) - class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, 
step, true) { - override def copy(start: T, end: T, step: T): Inclusive[T] = - NumericRange.inclusive(start, end, step) - - def exclusive: Exclusive[T] = NumericRange(start, end, step) - } - - @SerialVersionUID(3L) - class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) - extends NumericRange(start, end, step, false) { - override def copy(start: T, end: T, step: T): Exclusive[T] = - NumericRange(start, end, step) - - def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) - } - - def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = - new Exclusive(start, end, step) - def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = - new Inclusive(start, end, step) - - private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( - Numeric.BigIntIsIntegral -> Ordering.BigInt, - Numeric.IntIsIntegral -> Ordering.Int, - Numeric.ShortIsIntegral -> Ordering.Short, - Numeric.ByteIsIntegral -> Ordering.Byte, - Numeric.CharIsIntegral -> Ordering.Char, - Numeric.LongIsIntegral -> Ordering.Long, - Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal - ) - - @SerialVersionUID(3L) - private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { - import num.mkNumericOps - - private[this] var _hasNext = !self.isEmpty - private[this] var _next: T @uncheckedCaptures = self.start - private[this] val lastElement: T = if (_hasNext) self.last else self.start - override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 - def hasNext: Boolean = _hasNext - def next(): T = { - if (!_hasNext) Iterator.empty.next() - val value = _next - _hasNext = value != lastElement - _next = num.plus(value, self.step) - value - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala deleted file mode 100644 index 
929c79ce588a..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Queue.scala +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{Builder, ListBuffer} -import language.experimental.captureChecking - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. - * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the - * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. - * - * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case - * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, - * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] - * section on `Immutable Queues` for more information. 
- * - * @define Coll `immutable.Queue` - * @define coll immutable queue - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ - -sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, Queue, Queue[A]] - with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] - with StrictOptimizedSeqOps[A, Queue, Queue[A]] - with IterableFactoryDefaults[A, Queue] - with DefaultSerializable { - - override def iterableFactory: SeqFactory[Queue] = Queue - - /** Returns the `n`-th element of this queue. - * The first element is at position `0`. - * - * @param n index of the element to return - * @return the element at position `n` in this queue. - * @throws NoSuchElementException if the queue is too short. - */ - override def apply(n: Int): A = { - def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) - - var index = 0 - var curr = out - - while (index < n && curr.nonEmpty) { - index += 1 - curr = curr.tail - } - - if (index == n) { - if (curr.nonEmpty) curr.head - else if (in.nonEmpty) in.last - else indexOutOfRange() - } else { - val indexFromBack = n - index - val inLength = in.length - if (indexFromBack >= inLength) indexOutOfRange() - else in(inLength - indexFromBack - 1) - } - } - - /** Returns the elements in the list as an iterator - */ - override def iterator: Iterator[A] = out.iterator.concat(in.reverse) - - /** Checks if the queue is empty. - * - * @return true, iff there is no element in the queue. 
- */ - override def isEmpty: Boolean = in.isEmpty && out.isEmpty - - override def head: A = - if (out.nonEmpty) out.head - else if (in.nonEmpty) in.last - else throw new NoSuchElementException("head on empty queue") - - override def tail: Queue[A] = - if (out.nonEmpty) new Queue(in, out.tail) - else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) - else throw new NoSuchElementException("tail on empty queue") - - override def last: A = - if (in.nonEmpty) in.head - else if (out.nonEmpty) out.last - else throw new NoSuchElementException("last on empty queue") - - /* This is made to avoid inefficient implementation of iterator. */ - override def forall(p: A => Boolean): Boolean = - in.forall(p) && out.forall(p) - - /* This is made to avoid inefficient implementation of iterator. */ - override def exists(p: A => Boolean): Boolean = - in.exists(p) || out.exists(p) - - override protected[this] def className = "Queue" - - /** Returns the length of the queue. */ - override def length: Int = in.length + out.length - - override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) - - override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) - - override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { - val newIn = that match { - case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) - case that: List[B] => that reverse_::: this.in - case _ => - var result: List[B] = this.in - val iter = that.iterator - while (iter.hasNext) { - result = iter.next() :: result - } - result - } - if (newIn eq this.in) this else new Queue[B](newIn, this.out) - } - - /** Creates a new queue with element added at the end - * of the old queue. - * - * @param elem the element to insert - */ - def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) - - /** Creates a new queue with all elements provided by an `Iterable` object - * added at the end of the old queue. 
- * - * The elements are appended in the order they are given out by the - * iterator. - * - * @param iter an iterable object - */ - @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") - @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) - - /** Creates a new queue with all elements provided by an `Iterable` object - * added at the end of the old queue. - * - * The elements are appended in the order they are given out by the - * iterator. - * - * @param iter an iterable object - */ - def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) - - /** Returns a tuple with the first element in the queue, - * and a new queue with this element removed. - * - * @throws NoSuchElementException - * @return the first element of the queue. - */ - def dequeue: (A, Queue[A]) = out match { - case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) - case x :: xs => (x, new Queue(in, xs)) - case _ => throw new NoSuchElementException("dequeue on empty queue") - } - - /** Optionally retrieves the first element and a queue of the remaining elements. - * - * @return A tuple of the first element of the queue, and a new queue with this element removed. - * If the queue is empty, `None` is returned. - */ - def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @throws NoSuchElementException - * @return the first element. - */ - def front: A = head - - /** Returns a string representation of this queue. 
- */ - override def toString(): String = mkString("Queue(", ", ", ")") -} - -/** $factoryInfo - * @define Coll `immutable.Queue` - * @define coll immutable queue - */ -@SerialVersionUID(3L) -object Queue extends StrictOptimizedSeqFactory[Queue] { - def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) - - def from[A](source: IterableOnce[A]^): Queue[A] = source match { - case q: Queue[A] => q - case _ => - val list = List.from(source) - if (list.isEmpty) empty - else new Queue(Nil, list) - } - - def empty[A]: Queue[A] = EmptyQueue - override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) - - private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala deleted file mode 100644 index 459591d1a9cb..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Range.scala +++ /dev/null @@ -1,673 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl.RangeStepper -import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** The `Range` class represents integer values in range - * ''[start;end)'' with non-zero step value `step`. - * It's a special case of an indexed sequence. 
- * For example: - * - * {{{ - * val r1 = 0 until 10 - * val r2 = r1.start until r1.end by r1.step + 1 - * println(r2.length) // = 5 - * }}} - * - * Ranges that contain more than `Int.MaxValue` elements can be created, but - * these overfull ranges have only limited capabilities. Any method that - * could require a collection of over `Int.MaxValue` length to be created, or - * could be asked to index beyond `Int.MaxValue` elements will throw an - * exception. Overfull ranges can safely be reduced in size by changing - * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, - * `equals`, and access to the ends of the range (`head`, `last`, `tail`, - * `init`) are also permitted on overfull ranges. - * - * @param start the start of this range. - * @param end the end of the range. For exclusive ranges, e.g. - * `Range(0,3)` or `(0 until 3)`, this is one - * step past the last one in the range. For inclusive - * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, - * it may be in the range if it is not skipped by the step size. - * To find the last element inside a non-empty range, - * use `last` instead. - * @param step the step for the range. - * - * @define coll range - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define doesNotUseBuilders - * '''Note:''' this method does not use builders to construct a new range, - * and its complexity is O(1). 
- */ -@SerialVersionUID(3L) -sealed abstract class Range( - val start: Int, - val end: Int, - val step: Int -) - extends AbstractSeq[Int] - with IndexedSeq[Int] - with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] - with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] - with IterableFactoryDefaults[Int, IndexedSeq] - with Serializable { range => - - final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) - - override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { - val st = new RangeStepper(start, step, 0, length) - val r = - if (shape.shape == StepperShape.IntShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - private[this] def gap = end.toLong - start.toLong - private[this] def isExact = gap % step == 0 - private[this] def hasStub = isInclusive || !isExact - private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) - - def isInclusive: Boolean - - final override val isEmpty: Boolean = ( - (start > end && step > 0) - || (start < end && step < 0) - || (start == end && !isInclusive) - ) - - private[this] val numRangeElements: Int = { - if (step == 0) throw new IllegalArgumentException("step cannot be 0.") - else if (isEmpty) 0 - else { - val len = longLength - if (len > scala.Int.MaxValue) -1 - else len.toInt - } - } - - final def length = if (numRangeElements < 0) fail() else numRangeElements - - // This field has a sensible value only for non-empty ranges - private[this] val lastElement = step match { - case 1 => if (isInclusive) end else end-1 - case -1 => if (isInclusive) end else end+1 - case _ => - val remainder = (gap % step).toInt - if (remainder != 0) end - remainder - else if (isInclusive) end - else end - step - } - - /** The last element of this range. 
This method will return the correct value - * even if there are too many elements to iterate over. - */ - final override def last: Int = - if (isEmpty) throw Range.emptyRangeError("last") else lastElement - final override def head: Int = - if (isEmpty) throw Range.emptyRangeError("head") else start - - /** Creates a new range containing all the elements of this range except the last one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the last one. - */ - final override def init: Range = - if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) - - /** Creates a new range containing all the elements of this range except the first one. - * - * $doesNotUseBuilders - * - * @return a new range consisting of all the elements of this range except the first one. - */ - final override def tail: Range = { - if (isEmpty) throw Range.emptyRangeError("tail") - if (numRangeElements == 1) newEmptyRange(end) - else if(isInclusive) new Range.Inclusive(start + step, end, step) - else new Range.Exclusive(start + step, end, step) - } - - override def map[B](f: Int => B): IndexedSeq[B] = { - validateMaxLength() - super.map(f) - } - - final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = - if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) - - /** Create a new range with the `start` and `end` values of this range and - * a new `step`. - * - * @return a new range with a different step - */ - final def by(step: Int): Range = copy(start, end, step) - - // Check cannot be evaluated eagerly because we have a pattern where - // ranges are constructed like: "x to y by z" The "x to y" piece - // should not trigger an exception. So the calculation is delayed, - // which means it will not fail fast for those cases where failing was - // correct. 
- private[this] def validateMaxLength(): Unit = { - if (numRangeElements < 0) - fail() - } - private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) - private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") - - @throws[IndexOutOfBoundsException] - final def apply(idx: Int): Int = { - validateMaxLength() - if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") - else start + (step * idx) - } - - /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { - // Implementation chosen on the basis of favorable microbenchmarks - // Note--initialization catches step == 0 so we don't need to here - if (!isEmpty) { - var i = start - while (true) { - f(i) - if (i == lastElement) return - i += step - } - } - } - - override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = - elem match { - case i: Int => - val pos = posOf(i) - if (pos >= from) pos else -1 - case _ => super.indexOf(elem, from) - } - - override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = - elem match { - case i: Int => - val pos = posOf(i) - if (pos <= end) pos else -1 - case _ => super.lastIndexOf(elem, end) - } - - private[this] def posOf(i: Int): Int = - if (contains(i)) (i - start) / step else -1 - - override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { - case other: Range => - (this.length : @annotation.switch) match { - case 0 => other.isEmpty - case 1 => other.length == 1 && this.start == other.start - case n => other.length == n && ( - (this.start == other.start) - && (this.step == other.step) - ) - } - case _ => super.sameElements(that) - } - - /** Creates a new range containing the first `n` elements of this range. - * - * @param n the number of elements to take. 
- * @return a new range consisting of `n` first elements. - */ - final override def take(n: Int): Range = - if (n <= 0 || isEmpty) newEmptyRange(start) - else if (n >= numRangeElements && numRangeElements >= 0) this - else { - // May have more than Int.MaxValue elements in range (numRangeElements < 0) - // but the logic is the same either way: take the first n - new Range.Inclusive(start, locationAfterN(n - 1), step) - } - - /** Creates a new range containing all the elements of this range except the first `n` elements. - * - * @param n the number of elements to drop. - * @return a new range consisting of all the elements of this range except `n` first elements. - */ - final override def drop(n: Int): Range = - if (n <= 0 || isEmpty) this - else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) - else { - // May have more than Int.MaxValue elements (numRangeElements < 0) - // but the logic is the same either way: go forwards n steps, keep the rest - copy(locationAfterN(n), end, step) - } - - /** Creates a new range consisting of the last `n` elements of the range. - * - * $doesNotUseBuilders - */ - final override def takeRight(n: Int): Range = { - if (n <= 0) newEmptyRange(start) - else if (numRangeElements >= 0) drop(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - val x = y - step.toLong*(n-1) - if ((step > 0 && x < start) || (step < 0 && x > start)) this - else Range.inclusive(x.toInt, y, step) - } - } - - /** Creates a new range consisting of the initial `length - n` elements of the range. 
- * - * $doesNotUseBuilders - */ - final override def dropRight(n: Int): Range = { - if (n <= 0) this - else if (numRangeElements >= 0) take(numRangeElements - n) - else { - // Need to handle over-full range separately - val y = last - step.toInt*n - if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) - else Range.inclusive(start, y.toInt, step) - } - } - - // Advance from the start while we meet the given test - private[this] def argTakeWhile(p: Int => Boolean): Long = { - if (isEmpty) start - else { - var current = start - val stop = last - while (current != stop && p(current)) current += step - if (current != stop || !p(current)) current - else current.toLong + step - } - } - - final override def takeWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop==start) newEmptyRange(start) - else { - val x = (stop - step).toInt - if (x == last) this - else Range.inclusive(start, x, step) - } - } - - final override def dropWhile(p: Int => Boolean): Range = { - val stop = argTakeWhile(p) - if (stop == start) this - else { - val x = (stop - step).toInt - if (x == last) newEmptyRange(last) - else Range.inclusive(x + step, last, step) - } - } - - final override def span(p: Int => Boolean): (Range, Range) = { - val border = argTakeWhile(p) - if (border == start) (newEmptyRange(start), this) - else { - val x = (border - step).toInt - if (x == last) (this, newEmptyRange(last)) - else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) - } - } - - /** Creates a new range containing the elements starting at `from` up to but not including `until`. 
- * - * $doesNotUseBuilders - * - * @param from the element at which to start - * @param until the element at which to end (not included in the range) - * @return a new range consisting of a contiguous interval of values in the old range - */ - final override def slice(from: Int, until: Int): Range = - if (from <= 0) take(until) - else if (until >= numRangeElements && numRangeElements >= 0) drop(from) - else { - val fromValue = locationAfterN(from) - if (from >= until) newEmptyRange(fromValue) - else Range.inclusive(fromValue, locationAfterN(until-1), step) - } - - // Overridden only to refine the return type - final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) - - // Methods like apply throw exceptions on invalid n, but methods like take/drop - // are forgiving: therefore the checks are with the methods. - private[this] def locationAfterN(n: Int) = start + (step * n) - - // When one drops everything. Can't ever have unchecked operations - // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } - // will overflow. This creates an exclusive range where start == end - // based on the given value. - private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) - - /** Returns the reverse of this range. - */ - final override def reverse: Range = - if (isEmpty) this - else new Range.Inclusive(last, start, -step) - - /** Make range inclusive. 
- */ - final def inclusive: Range = - if (isInclusive) this - else new Range.Inclusive(start, end, step) - - final def contains(x: Int): Boolean = { - if (x == end && !isInclusive) false - else if (step > 0) { - if (x < start || x > end) false - else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) - } - else { - if (x < end || x > start) false - else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) - } - } - /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ - override final def contains[B >: Int](elem: B): Boolean = elem match { - case i: Int => this.contains(i) - case _ => super.contains(elem) - } - - final override def sum[B >: Int](implicit num: Numeric[B]): Int = { - if (num eq scala.math.Numeric.IntIsIntegral) { - // this is normal integer range with usual addition. arithmetic series formula can be used - if (isEmpty) 0 - else if (size == 1) head - else ((size * (head.toLong + last)) / 2).toInt - } else { - // user provided custom Numeric, we cannot rely on arithmetic series formula - if (isEmpty) num.toInt(num.zero) - else { - var acc = num.zero - var i = head - while (true) { - acc = num.plus(acc, i) - if (i == lastElement) return num.toInt(acc) - i = i + step - } - 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing - } - } - } - - final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) head - else last - } else if (Ordering.Int isReverseOf ord) { - if (step > 0) last - else head - } else super.min(ord) - - final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = - if (ord eq Ordering.Int) { - if (step > 0) last - else head - } else if (Ordering.Int isReverseOf ord) { - if (step > 0) head - else last - } else super.max(ord) - - override def tails: Iterator[Range] = - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = i <= Range.this.length 
- override def next() = { - if (hasNext) { - val res = Range.this.drop(i) - i += 1 - res - } else { - Iterator.empty.next() - } - } - } - - override def inits: Iterator[Range] = - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = i <= Range.this.length - override def next() = { - if (hasNext) { - val res = Range.this.dropRight(i) - i += 1 - res - } else { - Iterator.empty.next() - } - } - } - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - - final override def equals(other: Any): Boolean = other match { - case x: Range => - // Note: this must succeed for overfull ranges (length > Int.MaxValue) - if (isEmpty) x.isEmpty // empty sequences are equal - else // this is non-empty... - x.nonEmpty && start == x.start && { // ...so other must contain something and have same start - val l0 = last - (l0 == x.last && ( // And same end - start == l0 || step == x.step // And either the same step, or not take any steps - )) - } - case _ => - super.equals(other) - } - - final override def hashCode: Int = - if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) - else super.hashCode - - final override def toString: String = { - val preposition = if (isInclusive) "to" else "until" - val stepped = if (step == 1) "" else s" by $step" - val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" - s"${prefix}Range $start $preposition $end$stepped" - } - - override protected[this] def className = "Range" - - override def distinct: Range = this - - override def grouped(size: Int): Iterator[Range] = { - require(size >= 1, f"size=$size%d, but size must be positive") - if (isEmpty) { - Iterator.empty - } else { - val s = size - new AbstractIterator[Range] { - private[this] var i = 0 - override def hasNext = Range.this.length > i - override def next() = - if (hasNext) { - val x = Range.this.slice(i, i + s) - i += s - x - } else { - Iterator.empty.next() - } - } - } - } - - override def sorted[B >: Int](implicit 
ord: Ordering[B]): IndexedSeq[Int] = - if (ord eq Ordering.Int) { - if (step > 0) { - this - } else { - reverse - } - } else { - super.sorted(ord) - } -} - -/** - * Companion object for ranges. - * @define Coll `Range` - * @define coll range - */ -object Range { - - /** Counts the number of range elements. - * precondition: step != 0 - * If the size of the range exceeds Int.MaxValue, the - * result will be negative. - */ - def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { - if (step == 0) - throw new IllegalArgumentException("step cannot be 0.") - - val isEmpty = - if (start == end) !isInclusive - else if (start < end) step < 0 - else step > 0 - - if (isEmpty) 0 - else { - // Counts with Longs so we can recognize too-large ranges. - val gap: Long = end.toLong - start.toLong - val jumps: Long = gap / step - // Whether the size of this range is one larger than the - // number of full-sized jumps. - val hasStub = isInclusive || (gap % step != 0) - val result: Long = jumps + ( if (hasStub) 1 else 0 ) - - if (result > scala.Int.MaxValue) -1 - else result.toInt - } - } - def count(start: Int, end: Int, step: Int): Int = - count(start, end, step, isInclusive = false) - - /** Make a range from `start` until `end` (exclusive) with given step value. - * @note step != 0 - */ - def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) - - /** Make a range from `start` until `end` (exclusive) with step value 1. - */ - def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) - - /** Make an inclusive range from `start` to `end` with given step value. - * @note step != 0 - */ - def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) - - /** Make an inclusive range from `start` to `end` with step value 1. 
- */ - def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) - - @SerialVersionUID(3L) - final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { - def isInclusive: Boolean = true - } - - @SerialVersionUID(3L) - final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { - def isInclusive: Boolean = false - } - - // BigInt and Long are straightforward generic ranges. - object BigInt { - def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) - def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) - } - - object Long { - def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) - def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) - } - - // BigDecimal uses an alternative implementation of Numeric in which - // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for - // details. The intention is for it to throw an exception anytime - // imprecision or surprises might result from anything, although this may - // not yet be fully implemented. - object BigDecimal { - implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral - - def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = - NumericRange(start, end, step) - def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = - NumericRange.inclusive(start, end, step) - } - - // As there is no appealing default step size for not-really-integral ranges, - // we offer a partially constructed object. 
- class Partial[T, U](private val f: T -> U) extends AnyVal { - def by(x: T): U = f(x) - override def toString = "Range requires step" - } - - // Illustrating genericity with Int Range, which should have the same behavior - // as the original Range class. However we leave the original Range - // indefinitely, for performance and because the compiler seems to bootstrap - // off it and won't do so with our parameterized version without modifications. - object Int { - def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step) - def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step) - } - - private def emptyRangeError(what: String): Throwable = - new NoSuchElementException(what + " on empty Range") -} - -/** - * @param lastElement The last element included in the Range - * @param initiallyEmpty Whether the Range was initially empty or not - */ -@SerialVersionUID(3L) -private class RangeIterator( - start: Int, - step: Int, - lastElement: Int, - initiallyEmpty: Boolean -) extends AbstractIterator[Int] with Serializable { - private[this] var _hasNext: Boolean = !initiallyEmpty - private[this] var _next: Int = start - override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0 - def hasNext: Boolean = _hasNext - @throws[NoSuchElementException] - def next(): Int = { - if (!_hasNext) Iterator.empty.next() - val value = _next - _hasNext = value != lastElement - _next = value + step - value - } - - override def drop(n: Int): Iterator[Int] = { - if (n > 0) { - val longPos = _next.toLong + step * n - if (step > 0) { - _next = Math.min(lastElement, longPos).toInt - _hasNext = longPos <= lastElement - } - else if (step < 0) { - _next = Math.max(lastElement, longPos).toInt - _hasNext = longPos >= lastElement - } - } - this - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala 
b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala deleted file mode 100644 index 5fbc927d7a21..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala +++ /dev/null @@ -1,1234 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.meta.{getter, setter} -import scala.annotation.tailrec -import scala.runtime.Statics.releaseFence -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. - * - * Implementation note: since efficiency is important for data structures this implementation - * uses `null` to represent empty trees. This also means pattern matching cannot - * easily be used. The API represented by the RedBlackTree object tries to hide these - * optimizations behind a reasonably clean API. 
- */ -private[collection] object RedBlackTree { - - def isEmpty(tree: Tree[_, _]): Boolean = tree eq null - - def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null - def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { - case null => None - case tree => Some(tree.value) - } - - @tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp < 0) lookup(tree.left, x) - else if (cmp > 0) lookup(tree.right, x) - else tree - } - private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { - def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) tree - else if (tree.isMutable) { - val res = tree.mutableBlack.makeImmutable - releaseFence() - res - } else tree.black - } - /** Create a new balanced tree where `newLeft` replaces `tree.left`. - * tree and newLeft are never null */ - protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { - // Parameter trees - // tree | newLeft - // -- KV R | nl.L nl.KV nl.R - // | nl.R.L nl.R.KV nl.R.R - //Note - unlike the immutable trees we can't consider tree.left eq newLeft - //as the balance operations may mutate the same object - //but that check was mostly to avoid the object creation - if (newLeft.isRed) { - val newLeft_left = newLeft.left - val newLeft_right = newLeft.right - if (isRedTree(newLeft_left)) { - // RED - // black(nl.L) nl.KV black - // nl.R KV R - val resultLeft = newLeft_left.mutableBlack - val resultRight = tree.mutableBlackWithLeft(newLeft_right) - - newLeft.mutableWithLeftRight(resultLeft, resultRight) - } else if (isRedTree(newLeft_right)) { - // RED - // black nl.R.KV black - // nl.L nl.KV nl.R.L nl.R.R KV R - - val newLeft_right_right = newLeft_right.right - - val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left) - val 
resultRight = tree.mutableBlackWithLeft(newLeft_right_right) - - newLeft_right.mutableWithLeftRight(resultLeft, resultRight) - } else { - // tree - // newLeft KV R - tree.mutableWithLeft(newLeft) - } - } else { - // tree - // newLeft KV R - tree.mutableWithLeft(newLeft) - } - } - /** Create a new balanced tree where `newRight` replaces `tree.right`. - * tree and newRight are never null */ - protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { - // Parameter trees - // tree | newRight - // L KV -- | nr.L nr.KV nr.R - // | nr.L.L nr.L.KV nr.L.R - //Note - unlike the immutable trees we can't consider tree.right eq newRight - //as the balance operations may mutate the same object - //but that check was mostly to avoid the object creation - if (newRight.isRed) { - val newRight_left = newRight.left - if (isRedTree(newRight_left)) { - // RED - // black nr.L.KV black - // L KV nr.L.L nr.L.R nr.KV nr.R - - val resultLeft = tree.mutableBlackWithRight(newRight_left.left) - val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) - - newRight_left.mutableWithLeftRight(resultLeft, resultRight) - - } else { - val newRight_right = newRight.right - if (isRedTree(newRight_right)) { - // RED - // black nr.KV black(nr.R) - // L KV nr.L - - val resultLeft = tree.mutableBlackWithRight(newRight_left) - val resultRight = newRight_right.mutableBlack - - newRight.mutableWithLeftRight(resultLeft, resultRight) - } else { - // tree - // L KV newRight - tree.mutableWithRight(newRight) - } - } - } else { - // tree - // L KV newRight - tree.mutableWithRight(newRight) - } - } - } - private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = - if (tree eq null) { - mutableRedTree(k, (), null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - tree - } else { - val cmp = 
ordering.compare(k, tree.key) - if (cmp < 0) - mutableBalanceLeft(tree, mutableUpd(tree.left, k)) - else if (cmp > 0) - mutableBalanceRight(tree, mutableUpd(tree.right, k)) - else tree - } - } - private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { - protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = - if (tree eq null) { - mutableRedTree(k, v, null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - tree.mutableWithV(v) - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) - mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) - else if (cmp > 0) - mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) - else tree.mutableWithV(v) - } - } - - def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) - def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) - def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { - case (Some(from), Some(until)) => this.range(tree, from, until) - case (Some(from), None) => this.from(tree, from) - case (None, Some(until)) => this.until(tree, until) - case (None, None) => tree - } - def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) - def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) - def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) - def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) - - def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) - def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) - def slice[A: 
Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) - - def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty tree") - var result = tree - while (result.left ne null) result = result.left - result - } - def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty tree") - var result = tree - while (result.right ne null) result = result.right - result - } - - def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { - def _tail(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) throw new NoSuchElementException("empty tree") - else { - val tl = tree.left - if (tl eq null) tree.right - else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) - else tree.redWithLeft(_tail(tree.left)) - } - blacken(_tail(tree)) - } - - def init[A, B](tree: Tree[A, B]): Tree[A, B] = { - def _init(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) throw new NoSuchElementException("empty tree") - else { - val tr = tree.right - if (tr eq null) tree.left - else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) - else tree.redWithRight(_init(tr)) - } - blacken(_init(tree)) - } - - /** - * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. - */ - def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp == 0) tree - else if (cmp < 0) { - val l = minAfter(tree.left, x) - if (l != null) l else tree - } else minAfter(tree.right, x) - } - - /** - * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
- */ - def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp <= 0) maxBefore(tree.left, x) - else { - val r = maxBefore(tree.right, x) - if (r != null) r else tree - } - } - - def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) - - def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) - } - def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) - } - def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { - if (a eq b) true - else if (a eq null) false - else if (b eq null) false - else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) - } - - private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { - if (tree.left ne null) _foreach(tree.left, f) - f((tree.key, tree.value)) - if (tree.right ne null) _foreach(tree.right, f) - } - - def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) - - private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { - if (tree.left ne null) _foreachKey(tree.left, f) - f((tree.key)) - if (tree.right ne null) _foreachKey(tree.right, f) - } - - def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) - - private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { - if (tree.left ne null) _foreachEntry(tree.left, f) - f(tree.key, tree.value) - if (tree.right ne null) _foreachEntry(tree.right, f) - } - - def iterator[A: 
Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) - - @tailrec - def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - val count = this.count(tree.left) - if (n < count) nth(tree.left, n) - else if (n > count) nth(tree.right, n - count - 1) - else tree - } - - def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack - - @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed - @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack - - private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black - - // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` - // for building subtrees. Use `blacken` instead when building top-level trees. - private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = - if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t - - private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { - val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) - new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) - } - - /** Create a new balanced tree where `newLeft` replaces `tree.left`. 
*/ - private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { - // Parameter trees - // tree | newLeft - // -- KV R | nl.L nl.KV nl.R - // | nl.R.L nl.R.KV nl.R.R - if (tree.left eq newLeft) tree - else { - if (newLeft.isRed) { - val newLeft_left = newLeft.left - val newLeft_right = newLeft.right - if (isRedTree(newLeft_left)) { - // RED - // black(nl.L) nl.KV black - // nl.R KV R - val resultLeft = newLeft_left.black - val resultRight = tree.blackWithLeft(newLeft_right) - - newLeft.withLeftRight(resultLeft, resultRight) - } else if (isRedTree(newLeft_right)) { - // RED - // black nl.R.KV black - // nl.L nl.KV nl.R.L nl.R.R KV R - val newLeft_right_right = newLeft_right.right - - val resultLeft = newLeft.blackWithRight(newLeft_right.left) - val resultRight = tree.blackWithLeft(newLeft_right_right) - - newLeft_right.withLeftRight(resultLeft, resultRight) - } else { - // tree - // newLeft KV R - tree.withLeft(newLeft) - } - } else { - // tree - // newLeft KV R - tree.withLeft(newLeft) - } - } - } - /** Create a new balanced tree where `newRight` replaces `tree.right`. 
*/ - private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - // Parameter trees - // tree | newRight - // L KV -- | nr.L nr.KV nr.R - // | nr.L.L nr.L.KV nr.L.R - if (tree.right eq newRight) tree - else { - if (newRight.isRed) { - val newRight_left = newRight.left - if (isRedTree(newRight_left)) { - // RED - // black nr.L.KV black - // L KV nr.L.L nr.L.R nr.KV nr.R - val resultLeft = tree.blackWithRight(newRight_left.left) - val resultRight = newRight.blackWithLeft(newRight_left.right) - - newRight_left.withLeftRight(resultLeft, resultRight) - } else { - val newRight_right = newRight.right - if (isRedTree(newRight_right)) { - // RED - // black nr.KV black(nr.R) - // L KV nr.L - val resultLeft = tree.blackWithRight(newRight_left) - val resultRight = newRight_right.black - - newRight.withLeftRight(resultLeft, resultRight) - } else { - // tree - // L KV newRight - tree.withRight(newRight) - } - } - } else { - // tree - // L KV newRight - tree.withRight(newRight) - } - } - } - - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { - if (overwrite) - tree.withV(v) - else tree - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) - balanceLeft(tree, upd(tree.left, k, v, overwrite)) - else if (cmp > 0) - balanceRight(tree, upd(tree.right, k, v, overwrite)) - else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) - tree.withV(v) - else tree - } - private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) - } else { - val rank = count(tree.left) + 1 - if (idx < rank) - balanceLeft(tree, updNth(tree.left, idx, k, v)) - else if (idx > rank) - balanceRight(tree, updNth(tree.right, idx - rank, k, v)) - else tree 
- } - - private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) - val newLeft = doFrom(tree.left, from) - if (newLeft eq tree.left) tree - else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) - else join(newLeft, tree.key, tree.value, tree.right) - } - private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(to, tree.key)) return doTo(tree.left, to) - val newRight = doTo(tree.right, to) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) - else join (tree.left, tree.key, tree.value, newRight) - } - private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) - val newRight = doUntil(tree.right, until) - if (newRight eq tree.right) tree - else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) - else join(tree.left, tree.key, tree.value, newRight) - } - - private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - if (tree eq null) return null - if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) - if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) - val newLeft = doFrom(tree.left, from) - val newRight = doUntil(tree.right, until) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) - else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) - else join(newLeft, tree.key, tree.value, newRight) - } - - private[this] def doDrop[A, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = - if((tree eq null) || (n <= 0)) tree - else if(n >= tree.count) null - else { - val l = count(tree.left) - if(n > l) doDrop(tree.right, n-l-1) - else if(n == l) join(null, tree.key, tree.value, tree.right) - else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) - } - - private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = - if((tree eq null) || (n <= 0)) null - else if(n >= tree.count) tree - else { - val l = count(tree.left) - if(n <= l) doTake(tree.left, n) - else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) - else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) - } - - private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = - if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null - else if((from <= 0) && (until >= tree.count)) tree - else { - val l = count(tree.left) - if(until <= l) doSlice(tree.left, from, until) - else if(from > l) doSlice(tree.right, from-l-1, until-l-1) - else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1)) - } - - /* - * Forcing direct fields access using the @`inline` annotation helps speed up - * various operations (especially smallest/greatest and update/delete). - * - * Unfortunately the direct field access is not guaranteed to work (but - * works on the current implementation of the Scala compiler). - * - * An alternative is to implement the these classes using plain old Java code... - * - * Mutability - * This implementation encodes both mutable and immutable trees. - * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations - * by maintaining a mutable tree during internal operations, e.g. 
a builder building a Tree, and the other bulk - * API such as filter or ++ - * - * Mutable trees are only used within the confines of this bulk operation and not shared - * Mutable trees may transition to become immutable by calling beforePublish - * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing) - * - * Immutable trees may only child nodes (left and right) which are immutable Trees, and as such the immutable - * trees the entire transitive subtree is immutable - * - * Colour, mutablity and size encoding - * The colour of the Tree, its mutablity and size are all encoded in the _count field - * The colour is encoded in the top bit (31) of the _count. This allows a mutable tree to change colour without - * additional allocation - * The mutable trees always have bits 0 .. 30 (inclusive) set to 0 - * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree - * - * Naming - * All of the methods that can yield a mutable result have "mutable" on their name, and generally there - * is another method similarly named with doesn't. This is to aid safety and to reduce the cognitive load when - * reviewing changes. e.g. - * def upd(...) will update an immutable Tree, producing an immutable Tree - * def mutableUpd(...) 
will update a mutable or immutable Tree and may return a mutable or immutable Tree - * a method that has mutable in its name may return a immutable tree if the operation can reuse the existing tree - * - */ - private[immutable] final class Tree[A, +B]( - @(`inline` @getter @setter) private var _key: A, - @(`inline` @getter @setter) private var _value: AnyRef, - @(`inline` @getter @setter) private var _left: Tree[A, _], - @(`inline` @getter @setter) private var _right: Tree[A, _], - @(`inline` @getter @setter) private var _count: Int) - { - @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0 - // read only APIs - @`inline` private[RedBlackTree] final def count = { - //devTimeAssert((_count & 0x7FFFFFFF) != 0) - _count & colourMask - } - //retain the colour, and mark as mutable - @`inline` private def mutableRetainingColour = _count & colourBit - - //inlined here to avoid outer object null checks - @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count - @`inline` private[immutable] final def key = _key - @`inline` private[immutable] final def value = _value.asInstanceOf[B] - @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]] - @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]] - //Note - only used in tests outside RedBlackTree - @`inline` private[immutable] final def isBlack = _count < 0 - //Note - only used in tests outside RedBlackTree - @`inline` private[immutable] final def isRed = _count >= 0 - - override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)" - - //mutable APIs - private[RedBlackTree] def makeImmutable: Tree[A, B] = { - def makeImmutableImpl() = { - if (isMutable) { - var size = 1 - if (_left ne null) { - _left.makeImmutable - size += _left.count - } - if (_right ne null) { - _right.makeImmutable - size += _right.count - } - _count |= size //retains colour - } - this - } - 
makeImmutableImpl() - this - } - - private[RedBlackTree] def mutableBlack: Tree[A, B] = { - if (isBlack) this - else if (isMutable) { - _count = initialBlackCount - this - } - else new Tree(_key, _value, _left, _right, initialBlackCount) - } -// private[RedBlackTree] def mutableRed: Tree[A, B] = { -// if (isRed) this -// else if (mutable) { -// _count = initialRedCount -// this -// } -// else new Tree(_key, _value, _left, _right, initialRedCount) -// } - - private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { - if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this - else if (isMutable) { - _value = newValue.asInstanceOf[AnyRef] - this - } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) - } - - private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - if (_left eq newLeft) this - else if (isMutable) { - _left = newLeft - this - } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) - } - private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - if (_right eq newRight) this - else if (isMutable) { - _right = newRight - this - } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) - } - private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - if ((_left eq newLeft) && (_right eq newRight)) this - else if (isMutable) { - _left = newLeft - _right = newRight - this - } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) - } - private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - if ((_left eq newLeft) && isBlack) this - else if (isMutable) { - _count = initialBlackCount - _left = newLeft - this - } else new Tree(_key, _value, newLeft, _right, initialBlackCount) - } - private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - 
if ((_right eq newRight) && isBlack) this - else if (isMutable) { - _count = initialBlackCount - _right = newRight - this - } else new Tree(_key, _value, _left, newRight, initialBlackCount) - } - - private[RedBlackTree] def black: Tree[A, B] = { - //assertNotMutable(this) - if (isBlack) this - else new Tree(_key, _value, _left, _right, _count ^ colourBit) - } - private[RedBlackTree] def red: Tree[A, B] = { - //assertNotMutable(this) - if (isRed) this - else new Tree(_key, _value, _left, _right, _count ^ colourBit) - } - private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { - //assertNotMutable(this) - if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && - (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this - else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) - } - private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { - //assertNotMutable(this) - if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this - else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) - } - - private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - if (newLeft eq _left) this - else { - val size = sizeOf(newLeft) + sizeOf(_right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) - } - } - private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newRight) - if (newRight eq _right) this - else { - val size = sizeOf(_left) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size) - } - } - private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - if ((newLeft eq _left) && isBlack) this - else { - val size = 
sizeOf(newLeft) + sizeOf(_right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size) - } - } - private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - if ((newLeft eq _left) && isRed) this - else { - val size = sizeOf(newLeft) + sizeOf(_right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size) - } - } - private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newRight) - if ((newRight eq _right) && isBlack) this - else { - val size = sizeOf(_left) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size) - } - } - private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - if ((newRight eq _right) && isRed) this - else { - val size = sizeOf(_left) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size) - } - } - private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - //assertNotMutable(newRight) - if ((newLeft eq _left) && (newRight eq _right)) this - else { - val size = sizeOf(newLeft) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size) - } - } - private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - //assertNotMutable(newRight) - if ((newLeft eq _left) && (newRight eq _right) && isRed) this - else { - val size = sizeOf(newLeft) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size) - } - } 
- private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { - //assertNotMutable(this) - //assertNotMutable(newLeft) - //assertNotMutable(newRight) - if ((newLeft eq _left) && (newRight eq _right) && isBlack) this - else { - val size = sizeOf(newLeft) + sizeOf(newRight) + 1 - new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size) - } - } - } - //see #Tree docs "Colour, mutablity and size encoding" - //we make these final vals because the optimiser inlines them, without reference to the enclosing module - private[RedBlackTree] final val colourBit = 0x80000000 - //really its ~colourBit but that doesnt get inlined - private[RedBlackTree] final val colourMask = colourBit - 1 - private[RedBlackTree] final val initialBlackCount = colourBit - private[RedBlackTree] final val initialRedCount = 0 - - @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) - @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) - - /** create a new immutable red tree. 
- * left and right may be null - */ - private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { - //assertNotMutable(left) - //assertNotMutable(right) - val size = sizeOf(left) + sizeOf(right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) - } - private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { - //assertNotMutable(left) - //assertNotMutable(right) - val size = sizeOf(left) + sizeOf(right) + 1 - new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) - } - @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count - //immutable APIs - //assertions - uncomment decls and callers when changing functionality - // private def devTimeAssert(assertion: Boolean) = { - // //uncomment this during development of the functionality - // assert(assertion) - // } - // private def assertNotMutable(t:Tree[_,_]) = { - // devTimeAssert ((t eq null) || t.count > 0) - // } - private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { - protected[this] def nextResult(tree: Tree[A, B]): R - - override def hasNext: Boolean = lookahead ne null - - @throws[NoSuchElementException] - override def next(): R = { - val tree = lookahead - if(tree ne null) { - lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) - nextResult(tree) - } else Iterator.empty.next() - } - - @tailrec - protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else if (tree.left eq null) tree - else findLeftMostOrPopOnEmpty(goLeft(tree)) - - @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { - stackOfNexts(index) = tree - index += 1 - } - @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { - index -= 1 - stackOfNexts(index) - } - - 
protected[this] val stackOfNexts = if (root eq null) null else { - /* - * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] - * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. - * - * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) - * - * Although we don't store the deepest nodes in the path during iteration, - * we potentially do so in `startFrom`. - */ - val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - new Array[Tree[A, B] @uncheckedCaptures](maximumHeight) - } - private[this] var index = 0 - protected var lookahead: Tree[A, B] @uncheckedCaptures = - if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) - - /** - * Find the leftmost subtree whose key is equal to the given key, or if no such thing, - * the leftmost subtree with the key that would be "next" after it according - * to the ordering. Along the way build up the iterator's path stack so that "next" - * functionality works. - */ - private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { - @tailrec def find(tree: Tree[A, B]): Tree[A, B] = - if (tree eq null) popNext() - else find( - if (ordering.lteq(key, tree.key)) goLeft(tree) - else goRight(tree) - ) - find(root) - } - - @`inline` private[this] def goLeft(tree: Tree[A, B]) = { - pushNext(tree) - tree.left - } - - @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right - } - - private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { - override def nextResult(tree: Tree[A, B]) = ??? 
- - def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || - ordering.equiv(this.lookahead.key, that.lookahead.key) - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - def sameValues[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = this.lookahead.value == that.lookahead.value - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { - var equal = true - while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { - if (this.lookahead eq that.lookahead) { - this.lookahead = this.popNext() - that.lookahead = that.popNext() - } else { - equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || - ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value - this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) - that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) - } - } - equal && (this.lookahead eq null) && (that.lookahead eq null) - } - } - private[this] class EntriesIterator[A: Ordering, 
B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { - override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) - } - - private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.key - } - - private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { - override def nextResult(tree: Tree[A, B]) = tree.value - } - - /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Tree[A, Null] = size match { - case 0 => null - case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val x = xs.next() - val right = f(level+1, size-1-leftSize) - BlackTree(x, null, left, right) - } - f(1, size) - } - - /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Tree[A, B] = size match { - case 0 => null - case 1 => - val (k, v) = xs.next() - mkTree(level != maxUsedDepth || level == 1, k, v, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val (k, v) = xs.next() - val right = f(level+1, size-1-leftSize) - BlackTree(k, v, left, right) - } - f(1, size) - } - - def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] = - if(t eq null) null - else { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - val l2 = transform(l, f) - val v2 = 
f(k, v) - val r2 = transform(r, f) - if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef]) - && (l2 eq l) - && (r2 eq r)) t.asInstanceOf[Tree[A, C]] - else mkTree(t.isBlack, k, v2, l2, r2) - } - - def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else { - def fk(t: Tree[A, B]): Tree[A, B] = { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - val l2 = if(l eq null) null else fk(l) - val keep = f(k, v) - val r2 = if(r eq null) null else fk(r) - if(!keep) join2(l2, r2) - else if((l2 eq l) && (r2 eq r)) t - else join(l2, k, v, r2) - } - blacken(fk(t)) - } - - private[this] val null2 = (null, null) - - def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if(t eq null) (null, null) else { - if (t eq null) null2 - else { - object partitioner { - var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk - def fk(t: Tree[A, B]): Unit = { - val k = t.key - val v = t.value - val l = t.left - val r = t.right - var l2k, l2d, r2k, r2d = null: Tree[A, B] - if (l ne null) { - fk(l) - l2k = tmpk - l2d = tmpd - } - val keep = p(k, v) - if (r ne null) { - fk(r) - r2k = tmpk - r2d = tmpd - } - val jk = - if (!keep) join2(l2k, r2k) - else if ((l2k eq l) && (r2k eq r)) t - else join(l2k, k, v, r2k) - val jd = - if (keep) join2(l2d, r2d) - else if ((l2d eq l) && (r2d eq r)) t - else join(l2d, k, v, r2d) - tmpk = jk - tmpd = jd - } - } - - partitioner.fk(t) - (blacken(partitioner.tmpk), blacken(partitioner.tmpd)) - } - } - - // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] - // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ - - private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if 
(tree eq null) null else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) { - val newLeft = del(tree.left, k) - if (newLeft eq tree.left) tree - else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right) - else tree.redWithLeft(newLeft) - } else if (cmp > 0) { - val newRight = del(tree.right, k) - if (newRight eq tree.right) tree - else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight) - else tree.redWithRight(newRight) - } else append(tree.left, tree.right) - } - - private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tl)) { - if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) - else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) - else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) - else tree.blackWithLeftRight(tl, tr) - } else if (isRedTree(tr)) { - if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) - else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) - else tree.blackWithLeftRight(tl, tr) - } else tree.blackWithLeftRight(tl, tr) - - private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) - else if (isBlackTree(tr)) balance(tree, tl, tr.red) - else if (isRedTree(tr) && isBlackTree(tr.left)) - tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) - else sys.error("Defect: invariance violation") - - private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) - else if (isBlackTree(tl)) balance(tree, tl.red, tr) - else if (isRedTree(tl) && isBlackTree(tl.right)) - 
tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) - else sys.error("Defect: invariance violation") - - /** `append` is similar to `join2` but requires that both subtrees have the same black height */ - private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { - if (tl eq null) tr - else if (tr eq null) tl - else if (tl.isRed) { - if (tr.isRed) { - //tl is red, tr is red - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) - else tl.withRight(tr.withLeft(bc)) - } else { - //tl is red, tr is black - tl.withRight(append(tl.right, tr)) - } - } else { - if (tr.isBlack) { - //tl is black tr is black - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) - else balLeft(tl, tl.left, tr.withLeft(bc)) - } else { - //tl is black tr is red - tr.withLeft(append(tl, tr.left)) - } - } - } - - - // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) - // We don't store the black height in the tree so we pass it down into the join methods and derive the black height - // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. - // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
- - def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) - - def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) - - def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = - blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) - - /** Compute the rank from a tree and its black height */ - @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { - if(t eq null) 0 - else if(t.isBlack) 2*(bh-1) - else 2*bh-1 - } - - private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { - val rtl = rank(tl, bhtl) - if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) - else { - val tlBlack = isBlackTree(tl) - val bhtlr = if(tlBlack) bhtl-1 else bhtl - val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) - if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) - RedTree(ttr.key, ttr.value, - BlackTree(tl.key, tl.value, tl.left, ttr.left), - ttr.right.black) - else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) - } - } - - private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { - val rtr = rank(tr, bhtr) - if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) - else { - val trBlack = isBlackTree(tr) - val bhtrl = if(trBlack) bhtr-1 else bhtr - val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) - if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) - RedTree(ttl.key, ttl.value, - ttl.left.black, - BlackTree(tr.key, tr.value, ttl.right, tr.right)) - else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) - } - } - - private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { - @tailrec def h(t: Tree[_, _], i: Int): Int = - if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) - val bhtl = h(tl, 0) - val bhtr = h(tr, 0) - if(bhtl > bhtr) { - val tt = joinRight(tl, k, v, tr, 
bhtl, rank(tr, bhtr)) - if(isRedTree(tt) && isRedTree(tt.right)) tt.black - else tt - } else if(bhtr > bhtl) { - val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) - if(isRedTree(tt) && isRedTree(tt.left)) tt.black - else tt - } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) - } - - private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = - if(t eq null) (null, null, null, k2) - else { - val cmp = ordering.compare(k2, t.key) - if(cmp == 0) (t.left, t, t.right, t.key) - else if(cmp < 0) { - val (ll, b, lr, k1) = split(t.left, k2) - (ll, b, join(lr, t.key, t.value, t.right), k1) - } else { - val (rl, b, rr, k1) = split(t.right, k2) - (join(t.left, t.key, t.value, rl), b, rr, k1) - } - } - - private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = - if(t.right eq null) (t.left, t.key, t.value) - else { - val (tt, kk, vv) = splitLast(t.right) - (join(t.left, t.key, t.value, tt), kk, vv) - } - - private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = - if(tl eq null) tr - else if(tr eq null) tl - else { - val (ttl, k, v) = splitLast(tl) - join(ttl, k, v, tr) - } - - private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = - if((t1 eq null) || (t1 eq t2)) t2 - else if(t2 eq null) t1 - else { - val (l1, _, r1, k1) = split(t1, t2.key) - val tl = _union(l1, t2.left) - val tr = _union(r1, t2.right) - join(tl, k1, t2.value, tr) - } - - private[this] def _intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = - if((t1 eq null) || (t2 eq null)) null - else if (t1 eq t2) t1 - else { - val (l1, b, r1, k1) = split(t1, t2.key) - val tl = _intersect(l1, t2.left) - val tr = _intersect(r1, t2.right) - if(b ne null) join(tl, k1, t2.value, tr) - else join2(tl, tr) - } - - private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] 
= - if((t1 eq null) || (t2 eq null)) t1 - else if (t1 eq t2) null - else { - val (l1, _, r1, k1) = split(t1, t2.key) - val tl = _difference(l1, t2.left) - val tr = _difference(r1, t2.right) - join2(tl, tr) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index d575c3aaf14a..5184cadaccae 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -30,7 +30,7 @@ trait Seq[+A] extends Iterable[A] * @define coll immutable sequence * @define Coll `immutable.Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] +trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] /** * $factoryInfo diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala deleted file mode 100644 index 6c955fd52fc2..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.mutable.{Builder, ReusableBuilder} -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** A base trait for ordered, immutable maps. - * - * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs - * without regard to ordering. - * - * All behavior is defined in terms of the abstract methods in `SeqMap`. - * It is sufficient for concrete subclasses to implement those methods. 
- * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. - * - * @tparam K the type of the keys contained in this linked map. - * @tparam V the type of the values associated with the keys in this linked map. - * - * @define coll immutable seq map - * @define Coll `immutable.SeqMap` - */ - -trait SeqMap[K, +V] - extends Map[K, V] - with collection.SeqMap[K, V] - with MapOps[K, V, SeqMap, SeqMap[K, V]] - with MapFactoryDefaults[K, V, SeqMap, Iterable] { - override def mapFactory: MapFactory[SeqMap] = SeqMap -} - - -object SeqMap extends MapFactory[SeqMap] { - def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = - it match { - case sm: SeqMap[K, V] => sm - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl - - @SerialVersionUID(3L) - private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { - override def size: Int = 0 - override def knownSize: Int = 0 - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - def get(key: Any): Option[Nothing] = None - override def getOrElse [V1](key: Any, default: => V1): V1 = default - def iterator: Iterator[(Any, Nothing)] = Iterator.empty - def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) - def removed(key: Any): SeqMap[Any, Nothing] = this - } - - @SerialVersionUID(3L) - private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 1 - override def knownSize: Int = 1 - override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = key == key1 - def get(key: K): Option[V] = - if (key == key1) Some(value1) else None - override def getOrElse [V1 >: 
V](key: K, default: => V1): V1 = - if (key == key1) value1 else default - def iterator = Iterator.single((key1, value1)) - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap1(key1, value) - else new SeqMap2(key1, value1, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) SeqMap.empty else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - } - } - - @SerialVersionUID(3L) - private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 2 - override def knownSize: Int = 2 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else default - def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap2(key1, value, key2, value2) - else if (key == key2) new SeqMap2(key1, value1, key2, value) - else new SeqMap3(key1, value1, key2, value2, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) new SeqMap1(key2, value2) - else if (key == key2) new SeqMap1(key1, value1) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - } - } - - @SerialVersionUID(3L) - private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { - 
override def size: Int = 3 - override def knownSize: Int = 3 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else throw new NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else default - def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) - else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) - else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) - else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) - def removed(key: K): SeqMap[K, V] = - if (key == key1) new SeqMap2(key2, value2, key3, value3) - else if (key == key2) new SeqMap2(key1, value1, key3, value3) - else if (key == key3) new SeqMap2(key1, value1, key2, value2) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - } - } - - @SerialVersionUID(3L) - private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { - override def size: Int = 4 - override def knownSize: Int = 4 - override def apply(key: K) = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else throw new 
NoSuchElementException("key not found: " + key) - override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: K): Option[V] = - if (key == key1) Some(value1) - else if (key == key2) Some(value2) - else if (key == key3) Some(value3) - else if (key == key4) Some(value4) - else None - override def getOrElse [V1 >: V](key: K, default: => V1): V1 = - if (key == key1) value1 - else if (key == key2) value2 - else if (key == key3) value3 - else if (key == key4) value4 - else default - def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator - def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = - if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) - else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) - else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) - else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) - else { - // Directly create the elements for performance reasons - val fields = Vector(key1, key2, key3, key4, key) - val underlying: Map[K, (Int, V1)] = - HashMap( - (key1, (0, value1)), - (key2, (1, value2)), - (key3, (2, value3)), - (key4, (3, value4)), - (key, (4, value)) - ) - new VectorMap(fields, underlying) - } - def removed(key: K): SeqMap[K, V] = - if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) - else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) - else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) - else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) - else this - override def foreach[U](f: ((K, V)) => U): Unit = { - f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) - } - override def foreachEntry[U](f: (K, V) => U): Unit = { - f(key1, value1) - f(key2, value2) - f(key3, value3) - f(key4, value4) - 
} - - private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = - builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) - } - - private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { - private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty - private[this] var switchedToVectorMapBuilder: Boolean = false - private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _ - - override def clear(): Unit = { - elems = SeqMap.empty - if (vectorMapBuilder != null) { - vectorMapBuilder.clear() - } - switchedToVectorMapBuilder = false - } - - override def result(): SeqMap[K, V] = - if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems - - def addOne(elem: (K, V)) = { - if (switchedToVectorMapBuilder) { - vectorMapBuilder.addOne(elem) - } else if (elems.size < 4) { - elems = elems + elem - } else { - // assert(elems.size == 4) - if (elems.contains(elem._1)) { - elems = elems + elem // will not increase the size of the map - } else { - switchedToVectorMapBuilder = true - if (vectorMapBuilder == null) { - vectorMapBuilder = new VectorMapBuilder - } - elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) - vectorMapBuilder.addOne(elem) - } - } - - this - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = - if (switchedToVectorMapBuilder) { - vectorMapBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala deleted file mode 100644 index ac92f81b2013..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Set.scala +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.immutable.Set.Set4 -import scala.collection.mutable.{Builder, ReusableBuilder} -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** Base trait for immutable set collections */ -trait Set[A] extends Iterable[A] - with collection.Set[A] - with SetOps[A, Set, Set[A]] - with IterableFactoryDefaults[A, Set] { - override def iterableFactory: IterableFactory[Set] = Set -} - -/** Base trait for immutable set operations - * - * @define coll immutable set - * @define Coll `immutable.Set` - */ -trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] - extends collection.SetOps[A, CC, C] { - - /** Creates a new set with an additional element, unless the element is - * already present. - * - * @param elem the element to be added - * @return a new set that contains all elements of this set and that also - * contains `elem`. - */ - def incl(elem: A): C - - /** Alias for `incl` */ - override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated - - /** Creates a new set with a given element removed from this set. - * - * @param elem the element to be removed - * @return a new set that contains all elements of this set but that does not - * contain `elem`. - */ - def excl(elem: A): C - - /** Alias for `excl` */ - @`inline` final override def - (elem: A): C = excl(elem) - - def diff(that: collection.Set[A]): C = - foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) - - /** Creates a new $coll from this $coll by removing all elements of another - * collection. - * - * @param that the collection containing the elements to remove. - * @return a new $coll with the given elements removed, omitting duplicates. 
- */ - def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) - - /** Alias for removedAll */ - override final def -- (that: IterableOnce[A]): C = removedAll(that) -} - -trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] - extends SetOps[A, CC, C] - with collection.StrictOptimizedSetOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def concat(that: collection.IterableOnce[A]): C = { - var result: C = coll - val it = that.iterator - while (it.hasNext) result = result + it.next() - result - } -} - -/** - * $factoryInfo - * @define coll immutable set - * @define Coll `immutable.Set` - */ -@SerialVersionUID(3L) -object Set extends IterableFactory[Set] { - - def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] - - def from[E](it: collection.IterableOnce[E]^): Set[E] = - it match { - // We want `SortedSet` (and subclasses, such as `BitSet`) to - // rebuild themselves to avoid element type widening issues - case _: SortedSet[E] => (newBuilder[E] ++= it).result() - case _ if it.knownSize == 0 => empty[E] - case s: Set[E] => s - case _ => (newBuilder[E] ++= it).result() - } - - def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] - - /** An optimized representation for immutable empty sets */ - @SerialVersionUID(3L) - private object EmptySet extends AbstractSet[Any] with Serializable { - override def size: Int = 0 - override def isEmpty = true - override def knownSize: Int = size - override def filter(pred: Any => Boolean): Set[Any] = this - override def filterNot(pred: Any => Boolean): Set[Any] = this - override def removedAll(that: IterableOnce[Any]): Set[Any] = this - override def diff(that: collection.Set[Any]): Set[Any] = this - override def subsetOf(that: collection.Set[Any]): Boolean = true - override def intersect(that: collection.Set[Any]): Set[Any] = this - override def view: View[Any] = View.empty - def contains(elem: Any): Boolean = false - def incl(elem: Any): Set[Any] = new Set1(elem) - def 
excl(elem: Any): Set[Any] = this - def iterator: Iterator[Any] = Iterator.empty - override def foreach[U](f: Any => U): Unit = () - } - private[collection] def emptyInstance: Set[Any] = EmptySet - - @SerialVersionUID(3L) - private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure { - private[this] var current = 0 - private[this] var remainder = n - override def knownSize: Int = remainder - def hasNext = remainder > 0 - def apply(i: Int): A - def next(): A = - if (hasNext) { - val r = apply(current) - current += 1 - remainder -= 1 - r - } else Iterator.empty.next() - - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - current += n - remainder = Math.max(0, remainder - n) - } - this - } - } - - /** An optimized representation for immutable sets of size 1 */ - @SerialVersionUID(3L) - final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 1 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = elem == elem1 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set2(elem1, elem) - def excl(elem: A): Set[A] = - if (elem == elem1) Set.empty - else this - def iterator: Iterator[A] = Iterator.single(elem1) - override def foreach[U](f: A => U): Unit = f(elem1) - override def exists(p: A => Boolean): Boolean = p(elem1) - override def forall(p: A => Boolean): Boolean = p(elem1) - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = - if (pred(elem1) != isFlipped) this else Set.empty - - override def find(p: A => Boolean): Option[A] = - if (p(elem1)) Some(elem1) - else None - override def head: A = elem1 - override def tail: Set[A] = Set.empty - } - - /** An optimized representation for immutable sets of size 2 */ - @SerialVersionUID(3L) - final class Set2[A] private[collection] (elem1: A, elem2: A) extends 
AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 2 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = elem == elem1 || elem == elem2 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set3(elem1, elem2, elem) - def excl(elem: A): Set[A] = - if (elem == elem1) new Set1(elem2) - else if (elem == elem2) new Set1(elem1) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => this - } - } - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set1(elem2) - } - - /** An optimized representation for immutable sets of size 3 */ - @SerialVersionUID(3L) - final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 3 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else new Set4(elem1, elem2, elem3, elem) - def excl(elem: A): Set[A] = - if 
(elem == elem1) new Set2(elem2, elem3) - else if (elem == elem2) new Set2(elem1, elem3) - else if (elem == elem3) new Set2(elem1, elem2) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1, r2: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} - if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => new Set2(r1, r2) - case 3 => this - } - } - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set2(elem2, elem3) - } - - /** An optimized representation for immutable sets of size 4 */ - @SerialVersionUID(3L) - final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { - override def size: Int = 4 - override def isEmpty = false - override def knownSize: Int = size - def contains(elem: A): Boolean = - elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 - def incl(elem: A): Set[A] = - if (contains(elem)) this - else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem - def excl(elem: A): Set[A] = - if (elem == elem1) new Set3(elem2, 
elem3, elem4) - else if (elem == elem2) new Set3(elem1, elem3, elem4) - else if (elem == elem3) new Set3(elem1, elem2, elem4) - else if (elem == elem4) new Set3(elem1, elem2, elem3) - else this - def iterator: Iterator[A] = new SetNIterator[A](size) { - def apply(i: Int) = getElem(i) - } - private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } - - override def foreach[U](f: A => U): Unit = { - f(elem1); f(elem2); f(elem3); f(elem4) - } - override def exists(p: A => Boolean): Boolean = { - p(elem1) || p(elem2) || p(elem3) || p(elem4) - } - override def forall(p: A => Boolean): Boolean = { - p(elem1) && p(elem2) && p(elem3) && p(elem4) - } - override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { - var r1, r2, r3: A = null.asInstanceOf[A] - var n = 0 - if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} - if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} - if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} - if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} - - n match { - case 0 => Set.empty - case 1 => new Set1(r1) - case 2 => new Set2(r1, r2) - case 3 => new Set3(r1, r2, r3) - case 4 => this - } - } - - override def find(p: A => Boolean): Option[A] = { - if (p(elem1)) Some(elem1) - else if (p(elem2)) Some(elem2) - else if (p(elem3)) Some(elem3) - else if (p(elem4)) Some(elem4) - else None - } - override def head: A = elem1 - override def tail: Set[A] = new Set3(elem2, elem3, elem4) - - private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = - builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) - } -} - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] - -/** Builder for Set. - * $multipleResults - */ -private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { - private[this] var elems: Set[A @uncheckedCaptures] = Set.empty - private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _ - - override def clear(): Unit = { - elems = Set.empty - if (hashSetBuilder != null) { - hashSetBuilder.clear() - } - switchedToHashSetBuilder = false - } - - override def result(): Set[A] = - if (switchedToHashSetBuilder) hashSetBuilder.result() else elems - - def addOne(elem: A) = { - if (switchedToHashSetBuilder) { - hashSetBuilder.addOne(elem) - } else if (elems.size < 4) { - elems = elems + elem - } else { - // assert(elems.size == 4) - if (elems.contains(elem)) { - () // do nothing - } else { - switchedToHashSetBuilder = true - if (hashSetBuilder == null) { - hashSetBuilder = new HashSetBuilder - } - elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) - hashSetBuilder.addOne(elem) - } - } - - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = - if (switchedToHashSetBuilder) { - hashSetBuilder.addAll(xs) - this - } else { - super.addAll(xs) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala deleted file mode 100644 index 9587502fd908..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.mutable.Builder -import language.experimental.captureChecking - -/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. - * - * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in - * sorted order, according to the map's [[scala.math.Ordering]]. - * - * @example {{{ - * import scala.collection.immutable.SortedMap - * - * // Make a SortedMap via the companion object factory - * val weekdays = SortedMap( - * 2 -> "Monday", - * 3 -> "Tuesday", - * 4 -> "Wednesday", - * 5 -> "Thursday", - * 6 -> "Friday" - * ) - * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) - * - * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") - * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * - * val day3 = days.get(3) // Some("Tuesday") - * - * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) - * - * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) - * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) - * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * }}} - * - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. - */ -trait SortedMap[K, +V] - extends Map[K, V] - with collection.SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] - with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { - - override def unsorted: Map[K, V] = this - - override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - - /** The same map with a given default function. - * Note: The default is only used for `apply`. 
Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) -} - -trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => - - protected def coll: C with CC[K, V] - - def unsorted: Map[K, V] - - override def keySet: SortedSet[K] = new ImmutableKeySortedSet - - /** The implementation class of the set returned by `keySet` */ - protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { - def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { - val map = self.rangeImpl(from, until) - new map.ImmutableKeySortedSet - } - def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) - def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) - } - - // We override these methods to fix their return type (which would be `Map` otherwise) - def updated[V1 >: V](key: K, value: V1): CC[K, V1] - @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) 
- override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = { - // Implementation has been copied from `MapOps` - val previousValue = this.get(key) - remappingFunction(previousValue) match { - case None => previousValue.fold(coll)(_ => this.removed(key).coll) - case Some(nextValue) => - if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll - else coll.updated(key, nextValue) - } - } - override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) -} - -trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends SortedMapOps[K, V, CC, C] - with collection.StrictOptimizedSortedMapOps[K, V, CC, C] - with StrictOptimizedMapOps[K, V, Map, C] { - - override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { - var result: CC[K, V2] = coll - val it = xs.iterator - while (it.hasNext) result = result + it.next() - result - } -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - - override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { - case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm - case _ => super.from(it) - } - - final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) - extends Map.WithDefault[K, V](underlying, defaultValue) - with SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { - - implicit def ordering: Ordering[K] = underlying.ordering - - override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory - - def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) - - def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) - - def rangeImpl(from: Option[K], until: Option[K]): 
WithDefault[K, V] = - new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) - - // Need to override following methods to match type signatures of `SortedMap.WithDefault` - // for operations preserving default value - - override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = - new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = - new WithDefault( underlying.concat(xs) , defaultValue) - - override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = - new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = - SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala deleted file mode 100644 index 874abcaecda1..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable -import language.experimental.captureChecking - -/** Base trait for sorted sets */ -trait SortedSet[A] - extends Set[A] - with collection.SortedSet[A] - with SortedSetOps[A, SortedSet, SortedSet[A]] - with SortedSetFactoryDefaults[A, SortedSet, Set] { - - override def unsorted: Set[A] = this - - override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet -} - -/** - * @define coll immutable sorted set - * @define Coll `immutable.SortedSet` - */ -trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SetOps[A, Set, C] - with collection.SortedSetOps[A, CC, C] { - - def unsorted: Set[A] -} - -trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SortedSetOps[A, CC, C] - with collection.StrictOptimizedSortedSetOps[A, CC, C] - with StrictOptimizedSetOps[A, Set, C] { -} - -/** - * $factoryInfo - * @define coll immutable sorted set - * @define Coll `immutable.SortedSet` - */ -@SerialVersionUID(3L) -object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { - override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { - case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss - case _ => super.from(it) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala deleted file mode 100644 index b1e4622971fb..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** - * Trait that overrides operations to take advantage of strict builders. - */ -trait StrictOptimizedSeqOps[+A, +CC[_], +C] - extends Any - with SeqOps[A, CC, C] - with collection.StrictOptimizedSeqOps[A, CC, C] - with StrictOptimizedIterableOps[A, CC, C] { - - override def distinctBy[B](f: A -> B): C = { - if (lengthCompare(1) <= 0) coll - else { - val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B @uncheckedCaptures] - val it = this.iterator - var different = false - while (it.hasNext) { - val next = it.next() - if (seen.add(f(next))) builder += next else different = true - } - if (different) builder.result() else coll - } - } - - override def updated[B >: A](index: Int, elem: B): CC[B] = { - if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") - val b = iterableFactory.newBuilder[B] - if (knownSize >= 0) { - b.sizeHint(size) - } - var i = 0 - val it = iterator - while (i < index && it.hasNext) { - b += it.next() - i += 1 - } - if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") - b += elem - it.next() - while (it.hasNext) b += it.next() - b.result() - } - - override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { - val b = iterableFactory.newBuilder[B] - var i = 0 - val it = iterator - while (i < from && it.hasNext) { - b += it.next() - i += 1 - } - b ++= other - i = replaced - while (i > 0 && it.hasNext) { - it.next() - i -= 1 - } - while (it.hasNext) b += it.next() - b.result() - } - - override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) - -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala deleted file mode 
100644 index ff01ad7806ec..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.{RedBlackTree => RB} -import scala.collection.mutable.ReusableBuilder -import scala.runtime.AbstractFunction2 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** An immutable SortedMap whose values are stored in a red-black tree. - * - * This class is optimal when range queries will be performed, - * or when traversal in order of an ordering is desired. - * If you only need key lookups, and don't care in which order key-values - * are traversed in, consider using * [[scala.collection.immutable.HashMap]], - * which will generally have better performance. If you need insertion order, - * consider a * [[scala.collection.immutable.SeqMap]], which does not need to - * have an ordering supplied. 
- * - * @example {{{ - * import scala.collection.immutable.TreeMap - * - * // Make a TreeMap via the companion object factory - * val weekdays = TreeMap( - * 2 -> "Monday", - * 3 -> "Tuesday", - * 4 -> "Wednesday", - * 5 -> "Thursday", - * 6 -> "Friday" - * ) - * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) - * - * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") - * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * - * val day3 = days.get(3) // Some("Tuesday") - * - * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) - * - * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) - * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) - * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) - * }}} - * - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. - * @param ordering the implicit ordering used to compare objects of type `A`. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. 
- * - * @define Coll immutable.TreeMap - * @define coll immutable tree map - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) - extends AbstractMap[K, V] - with SortedMap[K, V] - with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] - with DefaultSerializable { - - def this()(implicit ordering: Ordering[K]) = this(null)(ordering) - private[immutable] def tree0: RB.Tree[K, V] = tree - - private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) - - override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap - - def iterator: Iterator[(K, V)] = RB.iterator(tree) - - def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) - - override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) - - def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) - - override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape.parUnbox( - scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( - size, tree, _.left, _.right, x => (x.key, x.value) - ) - ) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => 
DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) - } - s.asInstanceOf[S with EfficientSplit] - } - - def get(key: K): Option[V] = RB.get(tree, key) - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val resultOrNull = RB.lookup(tree, key) - if (resultOrNull eq null) default - else resultOrNull.value - } - - def removed(key: K): TreeMap[K,V] = - newMapOrSelf(RB.delete(tree, key)) - - def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = - newMapOrSelf(RB.update(tree, key, value, overwrite = true)) - - override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = - newMapOrSelf(that match { - case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => - RB.union(tree, tm.tree) - case ls: LinearSeq[(K,V1)] => - if (ls.isEmpty) tree //to avoid the creation of the adder - else { - val adder = new Adder[V1] - adder.addAll(ls) - adder.finalTree - } - case _ => - val adder = new Adder[V1] - val it = that.iterator - while (it.hasNext) { - adder.apply(it.next()) - } - adder.finalTree - }) - - override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { - 
case ts: TreeSet[K] if ordering == ts.ordering => - newMapOrSelf(RB.difference(tree, ts.tree)) - case _ => super.removedAll(keys) - } - - /** A new TreeMap with the entry added is returned, - * assuming that key is not in the TreeMap. - * - * @tparam V1 type of the values of the new bindings, a supertype of `V` - * @param key the key to be inserted - * @param value the value to be associated with `key` - * @return a new $coll with the inserted binding, if it wasn't present in the map - */ - @deprecated("Use `updated` instead", "2.13.0") - def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { - assert(!RB.contains(tree, key)) - updated(key, value) - } - - def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) - - override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { - case null => Option.empty - case x => Some((x.key, x.value)) - } - - override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { - case null => Option.empty - case x => Some((x.key, x.value)) - } - - override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) - - override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) - override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) - override def size: Int = RB.count(tree) - override def knownSize: Int = size - - override def isEmpty = size == 0 - - override def firstKey: K = RB.smallest(tree).key - - override def lastKey: K = RB.greatest(tree).key - - override def head: (K, V) = { - val smallest = RB.smallest(tree) - (smallest.key, smallest.value) - } - - override def last: (K, V) = { - val greatest = RB.greatest(tree) - (greatest.key, greatest.value) - } - - override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) - - override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) - - override def drop(n: Int): TreeMap[K, V] = { - if (n <= 0) this - else if (n >= size) empty - 
else new TreeMap(RB.drop(tree, n)) - } - - override def take(n: Int): TreeMap[K, V] = { - if (n <= 0) empty - else if (n >= size) this - else new TreeMap(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int) = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeMap(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) - - override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) - - private[this] def countWhile(p: ((K, V)) => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - - override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) - - override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) - - override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) - - override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = - newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) - - override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { - val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) - (newMapOrSelf(l), newMapOrSelf(r)) - } - - override def transform[W](f: (K, V) => W): TreeMap[K, W] = { - val t2 = RB.transform[K, V, W](tree, f) - if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] - else new TreeMap(t2) - } - - private final class Adder[B1 >: V] - extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { - private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0 - def finalTree = beforePublish(currentMutableTree) - override def apply(kv: (K, B1)): Unit = { - currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) - } - @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { - if (!ls.isEmpty) { - val kv = ls.head - currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) 
- addAll(ls.tail) - } - } - } - override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) - case _ => super.equals(obj) - } - - override protected[this] def className = "TreeMap" -} - -/** $factoryInfo - * @define Coll immutable.TreeMap - * @define coll immutable tree map - */ -@SerialVersionUID(3L) -object TreeMap extends SortedMapFactory[TreeMap] { - - def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() - - def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = - it match { - case tm: TreeMap[K, V] if ordering == tm.ordering => tm - case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => - new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) - case _ => - var t: RB.Tree[K, V] = null - val i = it.iterator - while (i.hasNext) { - val (k, v) = i.next() - t = RB.update(t, k, v, overwrite = true) - } - new TreeMap[K, V](t) - } - - def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] - - private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) - extends RB.MapHelper[K, V] - with ReusableBuilder[(K, V), TreeMap[K, V]] { - type Tree = RB.Tree[K, V] - private var tree:Tree @uncheckedCaptures = null - - def addOne(elem: (K, V)): this.type = { - tree = mutableUpd(tree, elem._1, elem._2) - this - } - private object adder extends AbstractFunction2[K, V, Unit] { - // we cache tree to avoid the outer access to tree - // in the hot path (apply) - private[this] var accumulator: Tree @uncheckedCaptures = null - def addForEach(hasForEach: collection.Map[K, V]): Unit = { - accumulator = tree - hasForEach.foreachEntry(this) - tree = accumulator - // be friendly to GC - accumulator = null - } - - override def apply(key: K, value: V): Unit = { - accumulator = mutableUpd(accumulator, key, value) - } - } - - override def addAll(xs: IterableOnce[(K, 
V)]^): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeMap[K, V] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0) - case that: collection.Map[K, V] => - //add avoiding creation of tuples - adder.addForEach(that) - case _ => - super.addAll(xs) - } - this - } - - override def clear(): Unit = { - tree = null - } - - override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala deleted file mode 100644 index 91233669e5ca..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala +++ /dev/null @@ -1,651 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements an immutable map that preserves order using - * a hash map for the key to value mapping to provide efficient lookup, - * and a tree for the ordering of the keys to provide efficient - * insertion/modification order traversal and destructuring. - * - * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) - * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) - * can be used instead if so specified at creation. 
- * - * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method - * can be used to switch to the specified ordering for the returned map. - * - * A key can be manually refreshed (i.e. placed at the end) via the - * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in - * use). - * - * Internally, an ordinal counter is increased for each insertion/modification - * and then the current ordinal is used as key in the tree map. After 2^32^ - * insertions/modifications the entire map is copied (thus resetting the ordinal - * counter). - * - * @tparam K the type of the keys contained in this map. - * @tparam V the type of the values associated with the keys in this map. - * @define coll immutable tree seq map - * @define Coll `immutable.TreeSeqMap` - */ -final class TreeSeqMap[K, +V] private ( - private val ordering: TreeSeqMap.Ordering[K], - private val mapping: TreeSeqMap.Mapping[K, V], - private val ordinal: Int, - val orderedBy: TreeSeqMap.OrderBy) - extends AbstractMap[K, V] - with SeqMap[K, V] - with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] - with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] - with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { - - import TreeSeqMap._ - - override protected[this] def className: String = "TreeSeqMap" - - override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap - - override val size = mapping.size - - override def knownSize: Int = size - - override def isEmpty = size == 0 - - /* - // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible - // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. 
- override def empty = TreeSeqMap.empty[K, V](orderedBy) - */ - - def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { - if (orderBy == this.orderedBy) this - else if (isEmpty) TreeSeqMap.empty(orderBy) - else new TreeSeqMap(ordering, mapping, ordinal, orderBy) - } - - def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { - mapping.get(key) match { - case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => - // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. - TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) - case Some((o, _)) if orderedBy == OrderBy.Insertion => - new TreeSeqMap( - ordering.include(o, key), - mapping.updated[(Int, V1)](key, (o, value)), - ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. - orderedBy) - case Some((o, _)) => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.exclude(o).append(o1, key), - mapping.updated[(Int, V1)](key, (o1, value)), - o1, - orderedBy) - case None => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.append(o1, key), - mapping.updated[(Int, V1)](key, (o1, value)), - o1, - orderedBy) - } - } - - def removed(key: K): TreeSeqMap[K, V] = { - mapping.get(key) match { - case Some((o, _)) => - new TreeSeqMap( - ordering.exclude(o), - mapping.removed(key), - ordinal, - orderedBy) - case None => - this - } - } - - def refresh(key: K): TreeSeqMap[K, V] = { - mapping.get(key) match { - case Some((o, _)) => - val o1 = increment(ordinal) - new TreeSeqMap( - ordering.exclude(o).append(o1, key), - mapping, - o1, - orderedBy) - case None => - this - } - } - - def get(key: K): Option[V] = mapping.get(key).map(value) - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): (K, V) = binding(iter.next()) - } - - override def keysIterator: Iterator[K] = 
new AbstractIterator[K] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): K = iter.next() - } - - override def valuesIterator: Iterator[V] = new AbstractIterator[V] { - private[this] val iter = ordering.iterator - - override def hasNext: Boolean = iter.hasNext - - override def next(): V = value(binding(iter.next())) - } - - override def contains(key: K): Boolean = mapping.contains(key) - - override def head: (K, V) = binding(ordering.head) - - override def headOption = ordering.headOption.map(binding) - - override def last: (K, V) = binding(ordering.last) - - override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) - - override def tail: TreeSeqMap[K, V] = { - val (head, tail) = ordering.headTail - new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) - } - - override def init: TreeSeqMap[K, V] = { - val (init, last) = ordering.initLast - new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) - } - - override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { - val sz = size - if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) - else { - val sz = size - val f = if (from >= 0) from else 0 - val u = if (until <= sz) until else sz - val l = u - f - if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) - else if (l > sz / 2) { - // Remove front and rear incrementally if majority of elements are to be kept - val (front, rest) = ordering.splitAt(f) - val (ong, rear) = rest.splitAt(l) - var mng = this.mapping - val frontIter = front.iterator - while (frontIter.hasNext) { - mng = mng - frontIter.next() - } - val rearIter = rear.iterator - while (rearIter.hasNext) { - mng = mng - rearIter.next() - } - new TreeSeqMap(ong, mng, ordinal, orderedBy) - } else { - // Populate with builder otherwise - val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - var i = 0 - while (i < f) { - iter.next() - i += 1 - } - 
while (i < u) { - val k = iter.next() - bdr.addOne((k, mapping(k)._2)) - i += 1 - } - bdr.result() - } - } - } - - override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - val (k2, v2) = f((k, v)) - bdr.addOne((k2, v2)) - } - bdr.result() - } - - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - val jter = f((k, v)).iterator - while (jter.hasNext) { - val (k2, v2) = jter.next() - bdr.addOne((k2, v2)) - } - } - bdr.result() - } - - override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) - val iter = ordering.iterator - while (iter.hasNext) { - val k = iter.next() - val (_, v) = mapping(k) - pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) - } - bdr.result() - } - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { - var ong: Ordering[K] = ordering - var mng: Mapping[K, V2] = mapping - var ord = increment(ordinal) - val iter = suffix.iterator - while (iter.hasNext) { - val (k, v2) = iter.next() - mng.get(k) match { - case Some((o, v)) => - if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) - else if (orderedBy == OrderBy.Modification) { - mng = mng.updated(k, (ord, v2)) - ong = ong.exclude(o).append(ord, k) - ord = increment(ord) - } - case None => - mng = mng.updated(k, (ord, v2)) - ong = ong.append(ord, k) - ord = increment(ord) - } - } - new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) - } - - @`inline` private[this] def value(p: (_, V)) = p._2 - @`inline` 
private[this] def binding(k: K) = mapping(k).copy(_1 = k) -} -object TreeSeqMap extends MapFactory[TreeSeqMap] { - sealed trait OrderBy - object OrderBy { - case object Insertion extends OrderBy - case object Modification extends OrderBy - } - - private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) - private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) - val Empty = EmptyByInsertion - def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) - def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { - if (orderBy == OrderBy.Modification) EmptyByModification - else EmptyByInsertion - }.asInstanceOf[TreeSeqMap[K, V]] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = - it match { - case om: TreeSeqMap[K, V] => om - case _ => (newBuilder[K, V] ++= it).result() - } - - @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 - - def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) - def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) - - final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { - private[this] val bdr = new MapBuilderImpl[K, (Int, V)] - private[this] var ong = Ordering.empty[K] - private[this] var ord = 0 - private[this] var aliased: TreeSeqMap[K, V] = _ - - override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) - def addOne(key: K, value: V): this.type = { - if (aliased ne null) { - aliased = aliased.updated(key, value) - } else { - bdr.getOrElse(key, null) match { - case (o, v) => - if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) - else if (orderedBy == OrderBy.Modification) { - bdr.addOne(key, (ord, value)) - 
ong = ong.exclude(o).appendInPlace(ord, key) - ord = increment(ord) - } - case null => - bdr.addOne(key, (ord, value)) - ong = ong.appendInPlace(ord, key) - ord = increment(ord) - } - } - this - } - - override def clear(): Unit = { - ong = Ordering.empty - ord = 0 - bdr.clear() - aliased = null - } - - override def result(): TreeSeqMap[K, V] = { - if (aliased eq null) { - aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) - } - aliased - } - } - - private type Mapping[K, +V] = Map[K, (Int, V)] - @annotation.unused - private val Mapping = Map - - /* The ordering implementation below is an adapted version of immutable.IntMap. */ - private[immutable] object Ordering { - import scala.collection.generic.BitOperations.Int._ - - @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" - - def empty[T] : Ordering[T] = Zero - - def apply[T](elems: (Int, T)*): Ordering[T] = - elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) - - // Iterator over a non-empty Ordering. - final class Iterator[+V](it: Ordering[V]) { - // Basically this uses a simple stack to emulate conversion over the tree. However - // because we know that Ints are at least 32 bits we can have at most 32 Bins and - // one Tip sitting on the tree at any point. Therefore we know the maximum stack - // depth is 33 - private[this] var index = 0 - private[this] val buffer = new Array[AnyRef](33) - - private[this] def pop = { - index -= 1 - buffer(index).asInstanceOf[Ordering[V]] - } - - private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { - buffer(index) = x.asInstanceOf[AnyRef] - index += 1 - } - - if (it != Zero) push(it) - - def hasNext = index != 0 - @tailrec - def next(): V = - pop match { - case Bin(_,_, Tip(_, v), right) => - push(right) - v - case Bin(_, _, left, right) => - push(right) - push(left) - next() - case Tip(_, v) => v - // This should never happen. 
We don't allow Ordering.Zero in subtrees of the Ordering - // and don't return an Ordering.Iterator for Ordering.Zero. - case Zero => throw new IllegalStateException("empty subtree not allowed") - } - } - - object Iterator { - val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) - def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] - } - - case object Zero extends Ordering[Nothing] { - // Important! Without this equals method in place, an infinite - // loop from Map.equals => size => pattern-match-on-Nil => equals - // develops. Case objects and custom equality don't mix without - // careful handling. - override def equals(that : Any): Boolean = that match { - case _: this.type => true - case _: Ordering[_] => false // The only empty Orderings are eq Nil - case _ => super.equals(that) - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" - } - - final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { - def withValue[S](s: S) = - if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] - else Tip(ord, s) - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" - } - - final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { - def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { - if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] - else Bin[S](prefix, mask, left, right) - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { - sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" - left.format(sb, subPrefix + "├── ", subPrefix + "│ ") - right.format(sb, subPrefix + "└── ", subPrefix + " ") - } - } - - private def branchMask(i: Int, j: Int) = 
highestOneBit(i ^ j) - - private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { - val m = branchMask(p1, p2) - val p = mask(p1, m) - if (zero(p1, m)) Bin(p, m, t1, t2) - else Bin(p, m, t2, t1) - } - - private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { - case (l, Zero) => l - case (Zero, r) => r - case (l, r) => Bin(prefix, mask, l, r) - } - } - - sealed abstract class Ordering[+T] { - import Ordering._ - import scala.annotation.tailrec - import scala.collection.generic.BitOperations.Int._ - - override final def toString: String = format - final def format: String = { - val sb = new StringBuilder - format(sb, "", "") - sb.toString() - } - protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit - - @tailrec - final def head: T = this match { - case Zero => throw new NoSuchElementException("head of empty map") - case Tip(k, v) => v - case Bin(_, _, l, _) => l.head - } - - @tailrec - final def headOption: Option[T] = this match { - case Zero => None - case Tip(_, v) => Some(v) - case Bin(_, _, l, _) => l.headOption - } - - @tailrec - final def last: T = this match { - case Zero => throw new NoSuchElementException("last of empty map") - case Tip(_, v) => v - case Bin(_, _, _, r) => r.last - } - - @tailrec - final def lastOption: Option[T] = this match { - case Zero => None - case Tip(_, v) => Some(v) - case Bin(_, _, _, r) => r.lastOption - } - - @tailrec - final def ordinal: Int = this match { - case Zero => 0 - case Tip(o, _) => o - case Bin(_, _, _, r) => r.ordinal - } - - final def tail: Ordering[T] = this match { - case Zero => throw new NoSuchElementException("tail of empty map") - case Tip(_, _) => Zero - case Bin(p, m, l, r) => bin(p, m, l.tail, r) - } - - final def headTail: (T, Ordering[T]) = this match { - case Zero => throw new NoSuchElementException("init of empty map") - case Tip(_, v) => (v, Zero) - case Bin(p, m, l, r) => - val 
(head, tail) = l.headTail - (head, bin(p, m, tail, r)) - } - - final def init: Ordering[T] = this match { - case Zero => throw new NoSuchElementException("init of empty map") - case Tip(_, _) => Zero - case Bin(p, m, l, r) => - bin(p, m, l, r.init) - } - - final def initLast: (Ordering[T], T) = this match { - case Zero => throw new NoSuchElementException("init of empty map") - case Tip(_, v) => (Zero, v) - case Bin(p, m, l, r) => - val (init, last) = r.initLast - (bin(p, m, l, init), last) - } - - final def iterator: Iterator[T] = this match { - case Zero => Iterator.empty - case _ => new Iterator(this) - } - - final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match { - case Zero => - Tip(ordinal, value) - case Tip(o, _) => - if (ordinal == o) Tip(ordinal, value) - else join(ordinal, Tip(ordinal, value), o, this) - case Bin(p, m, l, r) => - if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) - else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r) - else Bin(p, m, l, r.include(ordinal, value)) - } - - final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match { - case Zero => - Tip(ordinal, value) - case Tip(o, _) => - if (ordinal == o) Tip(ordinal, value) - else join(ordinal, Tip(ordinal, value), o, this) - case Bin(p, m, l, r) => - if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) - else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") - else Bin(p, m, l, r.append(ordinal, value)) - } - - @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) - private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { - case Zero => - Tip(ordinal, value) - case Tip(o, _) if o >= ordinal => - throw new 
IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}") - case Tip(o, _) if parent == null => - join(ordinal, Tip(ordinal, value), o, this) - case Tip(o, _) => - parent.right = join(ordinal, Tip(ordinal, value), o, this) - parent - case b @ Bin(p, m, _, r) => - if (!hasMatch(ordinal, p, m)) { - val b2 = join(ordinal, Tip(ordinal, value), p, this) - if (parent != null) { - parent.right = b2 - parent - } else b2 - } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") - else { - r.appendInPlace1(b, ordinal, value) - this - } - } - - final def exclude(ordinal: Int): Ordering[T] = this match { - case Zero => - Zero - case Tip(o, _) => - if (ordinal == o) Zero - else this - case Bin(p, m, l, r) => - if (!hasMatch(ordinal, p, m)) this - else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r) - else bin(p, m, l, r.exclude(ordinal)) - } - - final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { - var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T] - var i = n - (modifyOrRemove { (o, v) => - i -= 1 - if (i >= 0) Some(v) - else { - rear = rear.appendInPlace(o, v) - None - } - }, rear) - } - - /** - * A combined transform and filter function. Returns an `Ordering` such that - * for each `(key, value)` mapping in this map, if `f(key, value) == None` - * the map contains no mapping for key, and if `f(key, value) == Some(x)` the - * map contains `(key, x)`. - * - * @tparam S The type of the values in the resulting `LongMap`. - * @param f The transforming function. - * @return The modified map. 
- */ - final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { - case Zero => Zero - case Tip(key, value) => - f(key, value) match { - case None => Zero - case Some(value2) => - // hack to preserve sharing - if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] - else Tip(key, value2) - } - case Bin(prefix, mask, left, right) => - val l = left.modifyOrRemove(f) - val r = right.modifyOrRemove(f) - if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] - else bin(prefix, mask, l, r) - } - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala deleted file mode 100644 index c4241b818c38..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder -import scala.collection.immutable.{RedBlackTree => RB} -import scala.runtime.AbstractFunction1 -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable sorted sets using a tree. 
- * - * @tparam A the type of the elements contained in this tree set - * @param ordering the implicit ordering used to compare objects of type `A` - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] - * section on `Red-Black Trees` for more information. - * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) - extends AbstractSet[A] - with SortedSet[A] - with SortedSetOps[A, TreeSet, TreeSet[A]] - with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] - with SortedSetFactoryDefaults[A, TreeSet, Set] - with DefaultSerializable { - - if (ordering eq null) throw new NullPointerException("ordering must not be null") - - def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - - override def sortedIterableFactory = TreeSet - - private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) - - override def size: Int = RB.count(tree) - - override def isEmpty = size == 0 - - override def head: A = RB.smallest(tree).key - - override def last: A = RB.greatest(tree).key - - override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) - - override def init: TreeSet[A] = new TreeSet(RB.init(tree)) - - override def min[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - head - } else { - super.min(ord) - } - } - - override def max[A1 >: A](implicit ord: Ordering[A1]): A = { - if ((ord eq ordering) && nonEmpty) { - last - } else { - super.max(ord) - } - } - - override def drop(n: Int): TreeSet[A] = { - if (n <= 0) this - else if (n >= size) empty - else new TreeSet(RB.drop(tree, n)) - } - - override def take(n: Int): 
TreeSet[A] = { - if (n <= 0) empty - else if (n >= size) this - else new TreeSet(RB.take(tree, n)) - } - - override def slice(from: Int, until: Int): TreeSet[A] = { - if (until <= from) empty - else if (from <= 0) take(until) - else if (until >= size) drop(from) - else new TreeSet(RB.slice(tree, from, until)) - } - - override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0)) - - override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) - - private[this] def countWhile(p: A => Boolean): Int = { - var result = 0 - val it = iterator - while (it.hasNext && p(it.next())) result += 1 - result - } - override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) - - override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) - - override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - - override def minAfter(key: A): Option[A] = { - val v = RB.minAfter(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - override def maxBefore(key: A): Option[A] = { - val v = RB.maxBefore(tree, key) - if (v eq null) Option.empty else Some(v.key) - } - - def iterator: Iterator[A] = RB.keysIterator(tree) - - def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Tree[A, Any] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, 
_.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return true, iff `elem` is contained in this set. - */ - def contains(elem: A): Boolean = RB.contains(tree, elem) - - override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) - - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) - - /** Creates a new `TreeSet` with the entry added. - * - * @param elem a new element to add. - * @return a new $coll containing `elem` and all the elements of this $coll. - */ - def incl(elem: A): TreeSet[A] = - newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) - - /** Creates a new `TreeSet` with the entry removed. - * - * @param elem a new element to add. - * @return a new $coll containing all the elements of this $coll except `elem`. - */ - def excl(elem: A): TreeSet[A] = - newSetOrSelf(RB.delete(tree, elem)) - - override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { - val t = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - RB.union(tree, ts.tree) - case _ => - val it = that.iterator - var t = tree - while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) - t - } - newSetOrSelf(t) - } - - override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - //TODO add an implementation of a mutable subtractor similar to TreeMap - //but at least this doesn't create a TreeSet for each iteration - object sub extends AbstractFunction1[A, Unit] { - var currentTree = tree - override def apply(k: A): Unit = { - currentTree = RB.delete(currentTree, k) - } - } - that.iterator.foreach(sub) - newSetOrSelf(sub.currentTree) - } - - override def intersect(that: collection.Set[A]): TreeSet[A] = that match { - case 
ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.intersect(tree, ts.tree)) - case _ => - super.intersect(that) - } - - override def diff(that: collection.Set[A]): TreeSet[A] = that match { - case ts: TreeSet[A] if ordering == ts.ordering => - newSetOrSelf(RB.difference(tree, ts.tree)) - case _ => - super.diff(that) - } - - override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) - - override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { - val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) - (newSetOrSelf(l), newSetOrSelf(r)) - } - - override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) - case _ => super.equals(obj) - } - - override protected[this] def className = "TreeSet" -} - -/** - * $factoryInfo - * - * @define Coll `immutable.TreeSet` - * @define coll immutable tree set - */ -@SerialVersionUID(3L) -object TreeSet extends SortedIterableFactory[TreeSet] { - - def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] - - def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = - it match { - case ts: TreeSet[E] if ordering == ts.ordering => ts - case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => - new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) - case r: Range if (ordering eq Ordering.Int) || (Ordering.Int isReverseOf ordering) => - val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator - val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) - // The cast is needed to compile with Dotty: - // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound - new TreeSet[E](tree) - case _ => - var t: RB.Tree[E, Null] = null - val i = it.iterator - while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) - new TreeSet[E](t) - } - - 
def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] - private class TreeSetBuilder[A](implicit ordering: Ordering[A]) - extends RB.SetHelper[A] - with ReusableBuilder[A, TreeSet[A]] { - type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A @uncheckedCaptures, Any] = null - - override def addOne(elem: A): this.type = { - tree = mutableUpd(tree, elem) - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - xs match { - // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= - // for the moment we have to force immutability before the union - // which will waste some time and space - // calling `beforePublish` makes `tree` immutable - case ts: TreeSet[A] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree - else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => - if (tree eq null) tree = ts.tree0 - else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) - case _ => - super.addAll(xs) - } - this - } - - override def clear(): Unit = { - tree = null - } - - override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala deleted file mode 100644 index d9d33add512d..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Vector.scala +++ /dev/null @@ -1,2476 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package immutable - -import java.lang.Math.{abs, max => mmax, min => mmin} -import java.util.Arrays.{copyOf, copyOfRange} -import java.util.{Arrays, Spliterator} - -import scala.annotation.switch -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.VectorInline._ -import scala.collection.immutable.VectorStatics._ -import scala.collection.mutable.ReusableBuilder -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - - -/** $factoryInfo - * @define Coll `Vector` - * @define coll vector - */ -@SerialVersionUID(3L) -object Vector extends StrictOptimizedSeqFactory[Vector] { - - def empty[A]: Vector[A] = Vector0 - - def from[E](it: collection.IterableOnce[E]^): Vector[E] = - it match { - case v: Vector[E] => v - case _ => - val knownSize = it.knownSize - if (knownSize == 0) empty[E] - else if (knownSize > 0 && knownSize <= WIDTH) { - val a1: Arr1 = it match { - case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => - as.unsafeArray.asInstanceOf[Arr1] - case it: Iterable[E] => - val a1 = new Arr1(knownSize) - it.copyToArray(a1.asInstanceOf[Array[Any]]) - a1 - case _ => - val a1 = new Arr1(knownSize) - it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) - a1.asInstanceOf[Arr1] - } - new Vector1[E](a1) - } else { - (newBuilder ++= it).result() - } - } - - def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] - - /** Create a Vector with the same element at each index. - * - * Unlike `fill`, which takes a by-name argument for the value and can thereby - * compute different values for each index, this method guarantees that all - * elements are identical. This allows sparse allocation in O(log n) time and space. 
- */ - private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { - //TODO Make public; this method is private for now because it is not forward binary compatible - if(n <= 0) Vector0 - else { - val b = new VectorBuilder[A] - b.initSparse(n, elem) - b.result() - } - } - - private val defaultApplyPreferredMaxLength: Int = - try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", - "250").toInt - catch { - case _: SecurityException => 250 - } - - private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) -} - - -/** Vector is a general-purpose, immutable data structure. It provides random access and updates - * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). - * Because vectors strike a good balance between fast random selections and fast random functional updates, - * they are currently the default implementation of immutable indexed sequences. - * - * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass - * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the - * top level). - * - * Tree balancing: - * - Only the first dimension of an array may have a size < WIDTH - * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up - * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 - * - `prefix1` and `suffix1` are never empty - * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches - * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty - * - All arrays are left-aligned and truncated - * - * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running - * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. - */ -sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) - extends AbstractSeq[A] - with IndexedSeq[A] - with IndexedSeqOps[A, Vector, Vector[A]] - with StrictOptimizedSeqOps[A, Vector, Vector[A]] - with IterableFactoryDefaults[A, Vector] - with DefaultSerializable { - - override def iterableFactory: SeqFactory[Vector] = Vector - - override final def length: Int = - if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 - else prefix1.length - - override final def iterator: Iterator[A] = - if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator - else new NewVectorIterator(this, length, vectorSliceCount) - - override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { - var i = 0 - val len = prefix1.length - while (i != len) { - if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { - // each 1 bit indicates that index passes the filter. 
- // all indices < i are also assumed to pass the filter - var bitmap = 0 - var j = i + 1 - while (j < len) { - if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { - bitmap |= (1 << j) - } - j += 1 - } - val newLen = i + java.lang.Integer.bitCount(bitmap) - - if(this.isInstanceOf[BigVector[_]]) { - val b = new VectorBuilder[A] - var k = 0 - while(k < i) { - b.addOne(prefix1(k).asInstanceOf[A]) - k += 1 - } - k = i + 1 - while (i != newLen) { - if (((1 << k) & bitmap) != 0) { - b.addOne(prefix1(k).asInstanceOf[A]) - i += 1 - } - k += 1 - } - this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } - return b.result() - } else { - if (newLen == 0) return Vector0 - val newData = new Array[AnyRef](newLen) - System.arraycopy(prefix1, 0, newData, 0, i) - var k = i + 1 - while (i != newLen) { - if (((1 << k) & bitmap) != 0) { - newData(i) = prefix1(k) - i += 1 - } - k += 1 - } - return new Vector1[A](newData) - } - } - i += 1 - } - if(this.isInstanceOf[BigVector[_]]) { - val b = new VectorBuilder[A] - b.initFrom(prefix1) - this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } - b.result() - } else this - } - - // Dummy overrides to refine result types for binary compatibility: - override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) - override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) - override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { - val k = prefix.knownSize - if (k == 0) this - else if (k < 0) super.prependedAll(prefix) - else prependedAll0(prefix, k) - } - - override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { - val k = suffix.knownSize - if (k == 0) this - else if (k < 0) super.appendedAll(suffix) - else appendedAll0(suffix, k) - } - - protected[this] def prependedAll0[B >: A](prefix: 
collection.IterableOnce[B]^, k: Int): Vector[B] = { - // k >= 0, k = prefix.knownSize - val tinyAppendLimit = 4 + vectorSliceCount - if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { - var v: Vector[B] = this - val it = IndexedSeq.from(prefix).reverseIterator - while (it.hasNext) v = it.next() +: v - v - } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { - var v = prefix.asInstanceOf[Vector[B]] - val it = this.iterator - while (it.hasNext) v = v :+ it.next() - v - } else if (k < this.size - AlignToFaster) { - new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() - } else super.prependedAll(prefix) - } - - protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - // k >= 0, k = suffix.knownSize - val tinyAppendLimit = 4 + vectorSliceCount - if (k < tinyAppendLimit) { - var v: Vector[B @uncheckedCaptures] = this - suffix match { - case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) - case _ => suffix.iterator.foreach(x => v = v.appended(x)) - } - v - } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { - var v = suffix.asInstanceOf[Vector[B]] - val ri = this.reverseIterator - while (ri.hasNext) v = v.prepended(ri.next()) - v - } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { - val v = suffix.asInstanceOf[Vector[B]] - new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() - } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() - } - - override def className = "Vector" - - @inline override final def take(n: Int): Vector[A] = slice(0, n) - @inline override final def drop(n: Int): Vector[A] = slice(n, length) - @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) - @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) - override def tail: Vector[A] = 
slice(1, length) - override def init: Vector[A] = slice(0, length-1) - - /** Like slice but parameters must be 0 <= lo < hi < length */ - protected[this] def slice0(lo: Int, hi: Int): Vector[A] - - /** Number of slices */ - protected[immutable] def vectorSliceCount: Int - /** Slice at index */ - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] - /** Length of all slices up to and including index */ - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) - - override def toVector: Vector[A] = this - - override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - val s = shape.shape match { - case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) - case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) - case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) - case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) - } - s.asInstanceOf[S with EfficientSplit] - } - - protected[this] def ioob(index: Int): IndexOutOfBoundsException = - new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") - - override final def head: A = - if (prefix1.length == 0) throw new NoSuchElementException("empty.head") - else prefix1(0).asInstanceOf[A] - - override final def last: A = { - if(this.isInstanceOf[BigVector[_]]) { - val suffix = this.asInstanceOf[BigVector[_]].suffix1 - if(suffix.length == 0) throw new NoSuchElementException("empty.tail") - else suffix(suffix.length-1) - } else prefix1(prefix1.length-1) - }.asInstanceOf[A] - - override final def foreach[U](f: A => U): Unit = { - val c = 
vectorSliceCount - var i = 0 - while (i < c) { - foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) - i += 1 - } - } - - // The following definitions are needed for binary compatibility with ParVector - private[collection] def startIndex: Int = 0 - private[collection] def endIndex: Int = length - private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = - s.it = iterator.asInstanceOf[NewVectorIterator[B]] -} - - -/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ -private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { - - override final def slice(from: Int, until: Int): Vector[A] = { - val lo = mmax(from, 0) - val hi = mmin(until, length) - if (hi <= lo) Vector0 - else if (hi - lo == length) this - else slice0(lo, hi) - } -} - - -/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ -private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { - - protected[immutable] final def foreachRest[U](f: A => U): Unit = { - val c = vectorSliceCount - var i = 1 - while(i < c) { - foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) - i += 1 - } - } -} - - -/** Empty vector */ -private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { - - def apply(index: Int): Nothing = throw ioob(index) - - override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) - - override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) - - override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) - - override def map[B](f: Nothing => B): Vector[B] = this - - override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") - - override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") - - 
protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this - - protected[immutable] def vectorSliceCount: Int = 0 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 - - override def equals(o: Any): Boolean = { - if(this eq o.asInstanceOf[AnyRef]) true - else o match { - case that: Vector[_] => false - case o => super.equals(o) - } - } - - override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - Vector.from(prefix) - - override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = - Vector.from(suffix) - - override protected[this] def ioob(index: Int): IndexOutOfBoundsException = - new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") -} - -/** Flat ArraySeq-like structure */ -private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { - - @inline def apply(index: Int): A = { - if(index >= 0 && index < prefix1.length) - prefix1(index).asInstanceOf[A] - else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < prefix1.length) - new Vector1(copyUpdate(prefix1, index, elem)) - else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - val len1 = prefix1.length - if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) - else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - val len1 = prefix1.length - if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) - else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) - } - - override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = - new Vector1(copyOfRange(prefix1, lo, hi)) - - override def tail: Vector[A] = - if(prefix1.length 
== 1) Vector0 - else new Vector1(copyTail(prefix1)) - - override def init: Vector[A] = - if(prefix1.length == 1) Vector0 - else new Vector1(copyInit(prefix1)) - - protected[immutable] def vectorSliceCount: Int = 1 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case data1b => new Vector1(data1b) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val data1b = append1IfSpace(prefix1, suffix) - if(data1b ne null) new Vector1(data1b) - else super.appendedAll0(suffix, k) - } -} - - -/** 2-dimensional radix-balanced finger tree */ -private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val data2: Arr2, - _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - data2: Arr2 = data2, - suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector2(prefix1, len1, data2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len1 - if(io >= 0) { - val i2 = io >>> BITS - val i1 = io & MASK - if(i2 < data2.length) data2(i2)(i1) - else suffix1(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len1) { - val io = index - len1 - val i2 = io >>> BITS - val i1 = io & MASK - if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else { - copy(prefix1 = 
copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) - else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) - else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, data2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 3 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => data2 - case 2 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => length0 - suffix1.length - case 2 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => 
super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 3-dimensional radix-balanced finger tree */ -private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val data3: Arr3, - private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - data3: Arr3 = data3, - suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len12 - if(io >= 0) { - val i3 = io >>> BITS2 - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i3 < data3.length) data3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len12) { - val io = index - len12 - val i3 = io >>> BITS2 - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = 
copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) - else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) - else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), - data3 = mapElems(3, data3, f), - suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, data3) - b.consider(2, suffix2) - b.consider(1, 
suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 5 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => data3 - case 3 => suffix2 - case 4 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len12 + data3.length*WIDTH2 - case 3 => length0 - suffix1.length - case 4 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 4-dimensional radix-balanced finger tree */ -private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val data4: Arr4, - private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline 
private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - data4: Arr4 = data4, - suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len123 - if(io >= 0) { - val i4 = io >>> BITS3 - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i4 < data4.length) data4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len123) { - val io = index - len123 - val i4 = io >>> BITS3 - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - 
override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) - else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) - else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), - data4 = mapElems(4, data4, f), - suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): 
Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, data4) - b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 7 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => data4 - case 4 => suffix3 - case 5 => suffix2 - case 6 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len123 + data4.length*WIDTH3 - case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 - case 5 => length0 - suffix1.length - case 6 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 5-dimensional radix-balanced finger tree */ -private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, 
- private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, - private[immutable] val data5: Arr5, - private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - prefix4: Arr4 = prefix4, len1234: Int = len1234, - data5: Arr5 = data5, - suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len1234 - if(io >= 0) { - val i5 = io >>> BITS4 - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) - else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len123) { - val io = index - len123 - prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= 
len1234) { - val io = index - len1234 - val i5 = io >>> BITS4 - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) - else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len123) { - val io = index - len123 - copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B >: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else new 
Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = length0+1) - else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) - else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) - else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), - data5 = mapElems(5, data5, f), - suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, prefix4) - b.consider(5, data5) - b.consider(4, suffix4) - 
b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 9 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => prefix4 - case 4 => data5 - case 5 => suffix4 - case 6 => suffix3 - case 7 => suffix2 - case 8 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len1234 - case 4 => len1234 + data5.length*WIDTH4 - case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 - case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 - case 7 => length0 - suffix1.length - case 8 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - len1234 = len1234 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** 6-dimensional radix-balanced finger tree */ -private final class 
Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, - private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, - private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, - private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, - private[immutable] val prefix5: Arr5, private[immutable] val len12345: Int, - private[immutable] val data6: Arr6, - private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, - _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { - - @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, - prefix2: Arr2 = prefix2, len12: Int = len12, - prefix3: Arr3 = prefix3, len123: Int = len123, - prefix4: Arr4 = prefix4, len1234: Int = len1234, - prefix5: Arr5 = prefix5, len12345: Int = len12345, - data6: Arr6 = data6, - suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, - length0: Int = length0) = - new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) - - @inline def apply(index: Int): A = { - if(index >= 0 && index < length0) { - val io = index - len12345 - if(io >= 0) { - val i6 = io >>> BITS5 - val i5 = (io >>> BITS4) & MASK - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) - else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) - else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) - else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) - else if(i2 < suffix2.length) suffix2(i2)(i1) - else suffix1(i1) - } else if(index >= len1234) { - val io = index - len1234 - prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io 
& MASK) - } else if(index >= len123) { - val io = index - len123 - prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len12) { - val io = index - len12 - prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) - } else if(index >= len1) { - val io = index - len1 - prefix2(io >>> BITS)(io & MASK) - } else prefix1(index) - }.asInstanceOf[A] else throw ioob(index) - } - - override def updated[B >: A](index: Int, elem: B): Vector[B] = { - if(index >= 0 && index < length0) { - if(index >= len12345) { - val io = index - len12345 - val i6 = io >>> BITS5 - val i5 = (io >>> BITS4) & MASK - val i4 = (io >>> BITS3) & MASK - val i3 = (io >>> BITS2) & MASK - val i2 = (io >>> BITS) & MASK - val i1 = io & MASK - if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) - else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) - else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) - else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) - else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) - else copy(suffix1 = copyUpdate(suffix1, i1, elem)) - } else if(index >= len1234) { - val io = index - len1234 - copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len123) { - val io = index - len123 - copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len12) { - val io = index - len12 - copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) - } else if(index >= len1) { - val io = index - len1 - copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) - } else { - copy(prefix1 = copyUpdate(prefix1, index, elem)) - } - } else throw ioob(index) - } - - override def appended[B 
>: A](elem: B): Vector[B] = { - if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) - else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) - else throw new IllegalArgumentException - } - - override def prepended[B >: A](elem: B): Vector[B] = { - if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) - else if(len12345 < 
WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) - else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) - else throw new IllegalArgumentException - } - - override def map[B](f: A => B): Vector[B] = - copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), - data6 = mapElems(6, data6, f), - suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) - - protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { - val b = new VectorSliceBuilder(lo, hi) - b.consider(1, prefix1) - b.consider(2, prefix2) - b.consider(3, prefix3) - b.consider(4, prefix4) - b.consider(5, prefix5) - b.consider(6, data6) - b.consider(5, suffix5) - b.consider(4, suffix4) - b.consider(3, suffix3) - b.consider(2, suffix2) - b.consider(1, suffix1) - b.result() - } - - override def tail: Vector[A] = - if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, len12345 = len12345-1, length0 = length0-1) - else slice0(1, length0) - - override def init: Vector[A] = - if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) - else slice0(0, length0-1) - - protected[immutable] def vectorSliceCount: Int = 11 - protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { - case 0 => prefix1 - case 1 => prefix2 - case 2 => prefix3 - case 3 => prefix4 - case 4 => prefix5 - case 5 => data6 - case 6 => suffix5 - case 7 => suffix4 - case 8 => suffix3 - case 9 
=> suffix2 - case 10 => suffix1 - } - protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { - case 0 => len1 - case 1 => len12 - case 2 => len123 - case 3 => len1234 - case 4 => len12345 - case 5 => len12345 + data6.length*WIDTH5 - case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 - case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 - case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 - case 9 => length0 - suffix1.length - case 10 => length0 - } - - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = - prepend1IfSpace(prefix1, prefix) match { - case null => super.prependedAll0(prefix, k) - case prefix1b => - val diff = prefix1b.length - prefix1.length - copy(prefix1 = prefix1b, - len1 = len1 + diff, - len12 = len12 + diff, - len123 = len123 + diff, - len1234 = len1234 + diff, - len12345 = len12345 + diff, - length0 = length0 + diff, - ) - } - - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { - val suffix1b = append1IfSpace(suffix1, suffix) - if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) - else super.appendedAll0(suffix, k) - } -} - - -/** Helper class for vector slicing. It is initialized with the validated start and end index, - * then the vector slices are added in succession with `consider`. No matter what the dimension - * of the originating vector is or where the cut is performed, this always results in a - * structure with the highest-dimensional data in the middle and fingers of decreasing dimension - * at both ends, which can be turned into a new vector with very little rebalancing. 
- */ -private final class VectorSliceBuilder(lo: Int, hi: Int) { - //println(s"***** VectorSliceBuilder($lo, $hi)") - - private[this] val slices = new Array[Array[AnyRef]](11) - private[this] var len, pos, maxDim = 0 - - @inline private[this] def prefixIdx(n: Int) = n-1 - @inline private[this] def suffixIdx(n: Int) = 11-n - - def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { - //println(s"***** consider($n, /${a.length})") - val count = a.length * (1 << (BITS*(n-1))) - val lo0 = mmax(lo-pos, 0) - val hi0 = mmin(hi-pos, count) - if(hi0 > lo0) { - addSlice(n, a, lo0, hi0) - len += (hi0 - lo0) - } - pos += count - } - - private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { - //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") - if(n == 1) { - add(1, copyOrUse(a, lo, hi)) - } else { - val bitsN = BITS * (n-1) - val widthN = 1 << bitsN - val loN = lo >>> bitsN - val hiN = hi >>> bitsN - val loRest = lo & (widthN - 1) - val hiRest = hi & (widthN - 1) - //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") - if(loRest == 0) { - if(hiRest == 0) { - add(n, copyOrUse(a, loN, hiN)) - } else { - if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) - addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) - } - } else { - if(hiN == loN) { - addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) - } else { - addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) - if(hiRest == 0) { - if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) - } else { - if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) - addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) - } - } - } - } - } - - private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { - //println(s"***** add($n, /${a.length})") - val idx = - if(n <= maxDim) suffixIdx(n) - else { maxDim = n; prefixIdx(n) } - slices(idx) = a.asInstanceOf[Array[AnyRef]] - } - - def result[A](): Vector[A] = { - //println(s"***** result: $len, 
$maxDim") - if(len <= 32) { - if(len == 0) Vector0 - else { - val prefix1 = slices(prefixIdx(1)) - val suffix1 = slices(suffixIdx(1)) - //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") - val a: Arr1 = - if(prefix1 ne null) { - if(suffix1 ne null) concatArrays(prefix1, suffix1) - else prefix1 - } else if(suffix1 ne null) suffix1 - else { - val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] - if(prefix2 ne null) prefix2(0) - else { - val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] - suffix2(0) - } - } - new Vector1(a) - } - } else { - balancePrefix(1) - balanceSuffix(1) - var resultDim = maxDim - if(resultDim < 6) { - val pre = slices(prefixIdx(maxDim)) - val suf = slices(suffixIdx(maxDim)) - if((pre ne null) && (suf ne null)) { - // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, - // otherwise increase the dimension - if(pre.length + suf.length <= WIDTH-2) { - slices(prefixIdx(maxDim)) = concatArrays(pre, suf) - slices(suffixIdx(maxDim)) = null - } else resultDim += 1 - } else { - // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we - // only allow WIDTH-2 for the main data, so increase the dimension in this case - val one = if(pre ne null) pre else suf - if(one.length > WIDTH-2) resultDim += 1 - } - } - val prefix1 = slices(prefixIdx(1)) - val suffix1 = slices(suffixIdx(1)) - val len1 = prefix1.length - val res = (resultDim: @switch) match { - case 2 => - val data2 = dataOr(2, empty2) - new Vector2[A](prefix1, len1, data2, suffix1, len) - case 3 => - val prefix2 = prefixOr(2, empty2) - val data3 = dataOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) - case 4 => - val prefix2 = prefixOr(2, empty2) - val 
prefix3 = prefixOr(3, empty3) - val data4 = dataOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) - case 5 => - val prefix2 = prefixOr(2, empty2) - val prefix3 = prefixOr(3, empty3) - val prefix4 = prefixOr(4, empty4) - val data5 = dataOr(5, empty5) - val suffix4 = suffixOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - val len1234 = len123 + (prefix4.length * WIDTH3) - new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) - case 6 => - val prefix2 = prefixOr(2, empty2) - val prefix3 = prefixOr(3, empty3) - val prefix4 = prefixOr(4, empty4) - val prefix5 = prefixOr(5, empty5) - val data6 = dataOr(6, empty6) - val suffix5 = suffixOr(5, empty5) - val suffix4 = suffixOr(4, empty4) - val suffix3 = suffixOr(3, empty3) - val suffix2 = suffixOr(2, empty2) - val len12 = len1 + (prefix2.length * WIDTH) - val len123 = len12 + (prefix3.length * WIDTH2) - val len1234 = len123 + (prefix4.length * WIDTH3) - val len12345 = len1234 + (prefix5.length * WIDTH4) - new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) - } - res - } - } - - @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { - val p = slices(prefixIdx(n)) - if(p ne null) p.asInstanceOf[Array[T]] else a - } - - @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { - val s = slices(suffixIdx(n)) - if(s ne null) s.asInstanceOf[Array[T]] else a - } - - @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] 
= { - val p = slices(prefixIdx(n)) - if(p ne null) p.asInstanceOf[Array[T]] - else { - val s = slices(suffixIdx(n)) - if(s ne null) s.asInstanceOf[Array[T]] else a - } - } - - /** Ensure prefix is not empty */ - private[this] def balancePrefix(n: Int): Unit = { - if(slices(prefixIdx(n)) eq null) { - if(n == maxDim) { - slices(prefixIdx(n)) = slices(suffixIdx(n)) - slices(suffixIdx(n)) = null - } else { - balancePrefix(n+1) - val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] - //assert(preN1 ne null) - slices(prefixIdx(n)) = preN1(0) - if(preN1.length == 1) { - slices(prefixIdx(n+1)) = null - if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n - } else { - slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] - } - } - } - } - - /** Ensure suffix is not empty */ - private[this] def balanceSuffix(n: Int): Unit = { - if(slices(suffixIdx(n)) eq null) { - if(n == maxDim) { - slices(suffixIdx(n)) = slices(prefixIdx(n)) - slices(prefixIdx(n)) = null - } else { - balanceSuffix(n+1) - val sufN1 = slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] - //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") - slices(suffixIdx(n)) = sufN1(sufN1.length-1) - if(sufN1.length == 1) { - slices(suffixIdx(n+1)) = null - if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n - } else { - slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] - } - } - } - } - - override def toString: String = - s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" - - private[immutable] def getSlices: Array[Array[AnyRef]] = slices -} - - -final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { - - private[this] var a6: Arr6 = _ - private[this] var a5: Arr5 = _ - private[this] var a4: Arr4 = _ - private[this] var a3: Arr3 = _ - private[this] var a2: Arr2 = _ - private[this] var a1: Arr1 = new Arr1(WIDTH) - private[this] var 
len1, lenRest, offset = 0 - private[this] var prefixIsRightAligned = false - private[this] var depth = 1 - - @inline private[this] final def setLen(i: Int): Unit = { - len1 = i & MASK - lenRest = i - len1 - } - - override def knownSize: Int = len1 + lenRest - offset - - @inline def size: Int = knownSize - @inline def isEmpty: Boolean = knownSize == 0 - @inline def nonEmpty: Boolean = knownSize != 0 - - def clear(): Unit = { - a6 = null - a5 = null - a4 = null - a3 = null - a2 = null - a1 = new Arr1(WIDTH) - len1 = 0 - lenRest = 0 - offset = 0 - prefixIsRightAligned = false - depth = 1 - } - - private[immutable] def initSparse(size: Int, elem: A): Unit = { - setLen(size) - Arrays.fill(a1, elem) - if(size > WIDTH) { - a2 = new Array(WIDTH) - Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) - if(size > WIDTH2) { - a3 = new Array(WIDTH) - Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) - if(size > WIDTH3) { - a4 = new Array(WIDTH) - Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) - if(size > WIDTH4) { - a5 = new Array(WIDTH) - Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) - if(size > WIDTH5) { - a6 = new Array(LASTWIDTH) - Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) - depth = 6 - } else depth = 5 - } else depth = 4 - } else depth = 3 - } else depth = 2 - } else depth = 1 - } - - private[immutable] def initFrom(prefix1: Arr1): Unit = { - depth = 1 - setLen(prefix1.length) - a1 = copyOrUse(prefix1, 0, WIDTH) - if(len1 == 0 && lenRest > 0) { - // force advance() on next addition: - len1 = WIDTH - lenRest -= WIDTH - } - } - - private[immutable] def initFrom(v: Vector[_]): this.type = { - (v.vectorSliceCount: @switch) match { - case 0 => - case 1 => - val v1 = v.asInstanceOf[Vector1[_]] - depth = 1 - setLen(v1.prefix1.length) - a1 = copyOrUse(v1.prefix1, 0, WIDTH) - case 3 => - val v2 = v.asInstanceOf[Vector2[_]] - val d2 = v2.data2 - a1 = copyOrUse(v2.suffix1, 0, WIDTH) - depth = 2 - offset = WIDTH - v2.len1 - setLen(v2.length0 + offset) - a2 = new Arr2(WIDTH) - a2(0) 
= v2.prefix1 - System.arraycopy(d2, 0, a2, 1, d2.length) - a2(d2.length+1) = a1 - case 5 => - val v3 = v.asInstanceOf[Vector3[_]] - val d3 = v3.data3 - val s2 = v3.suffix2 - a1 = copyOrUse(v3.suffix1, 0, WIDTH) - depth = 3 - offset = WIDTH2 - v3.len12 - setLen(v3.length0 + offset) - a3 = new Arr3(WIDTH) - a3(0) = copyPrepend(v3.prefix1, v3.prefix2) - System.arraycopy(d3, 0, a3, 1, d3.length) - a2 = copyOf(s2, WIDTH) - a3(d3.length+1) = a2 - a2(s2.length) = a1 - case 7 => - val v4 = v.asInstanceOf[Vector4[_]] - val d4 = v4.data4 - val s3 = v4.suffix3 - val s2 = v4.suffix2 - a1 = copyOrUse(v4.suffix1, 0, WIDTH) - depth = 4 - offset = WIDTH3 - v4.len123 - setLen(v4.length0 + offset) - a4 = new Arr4(WIDTH) - a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3) - System.arraycopy(d4, 0, a4, 1, d4.length) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a4(d4.length+1) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - case 9 => - val v5 = v.asInstanceOf[Vector5[_]] - val d5 = v5.data5 - val s4 = v5.suffix4 - val s3 = v5.suffix3 - val s2 = v5.suffix2 - a1 = copyOrUse(v5.suffix1, 0, WIDTH) - depth = 5 - offset = WIDTH4 - v5.len1234 - setLen(v5.length0 + offset) - a5 = new Arr5(WIDTH) - a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4) - System.arraycopy(d5, 0, a5, 1, d5.length) - a4 = copyOf(s4, WIDTH) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a5(d5.length+1) = a4 - a4(s4.length) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - case 11 => - val v6 = v.asInstanceOf[Vector6[_]] - val d6 = v6.data6 - val s5 = v6.suffix5 - val s4 = v6.suffix4 - val s3 = v6.suffix3 - val s2 = v6.suffix2 - a1 = copyOrUse(v6.suffix1, 0, WIDTH) - depth = 6 - offset = WIDTH5 - v6.len12345 - setLen(v6.length0 + offset) - a6 = new Arr6(LASTWIDTH) - a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5) - System.arraycopy(d6, 0, a6, 1, d6.length) - a5 = copyOf(s5, 
WIDTH) - a4 = copyOf(s4, WIDTH) - a3 = copyOf(s3, WIDTH) - a2 = copyOf(s2, WIDTH) - a6(d6.length+1) = a5 - a5(s5.length) = a4 - a4(s4.length) = a3 - a3(s3.length) = a2 - a2(s2.length) = a1 - } - if(len1 == 0 && lenRest > 0) { - // force advance() on next addition: - len1 = WIDTH - lenRest -= WIDTH - } - this - } - - //TODO Make public; this method is only private for binary compatibility - private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = { - if (len1 != 0 || lenRest != 0) - throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .reset() or use a new VectorBuilder.") - val (prefixLength, maxPrefixLength) = bigVector match { - case Vector0 => (0, 1) - case v1: Vector1[_] => (0, 1) - case v2: Vector2[_] => (v2.len1, WIDTH) - case v3: Vector3[_] => (v3.len12, WIDTH2) - case v4: Vector4[_] => (v4.len123, WIDTH3) - case v5: Vector5[_] => (v5.len1234, WIDTH4) - case v6: Vector6[_] => (v6.len12345, WIDTH5) - } - if (maxPrefixLength == 1) return this // does not really make sense to align for <= 32 element-vector - val overallPrefixLength = (before + prefixLength) % maxPrefixLength - offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength - // pretend there are already `offset` elements added - advanceN(offset & ~MASK) - len1 = offset & MASK - prefixIsRightAligned = true - this - } - - /** - * Removes `offset` leading `null`s in the prefix. - * This is needed after calling `alignTo` and subsequent additions, - * directly before the result is used for creating a new Vector. - * Note that the outermost array keeps its length to keep the - * Builder re-usable. - * - * example: - * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...) - * becomes - * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?) 
- */ - private[this] def leftAlignPrefix(): Unit = { - @inline def shrinkOffsetIfToLarge(width: Int): Unit = { - val newOffset = offset % width - lenRest -= offset - newOffset - offset = newOffset - } - var a: Array[AnyRef] = null // the array we modify - var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a - if (depth >= 6) { - a = a6.asInstanceOf[Array[AnyRef]] - val i = offset >>> BITS5 - if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) - shrinkOffsetIfToLarge(WIDTH5) - if ((lenRest >>> BITS5) == 0) depth = 5 - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 5) { - if (a == null) a = a5.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS4) & MASK - if (depth == 5) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a5 = a.asInstanceOf[Arr5] - shrinkOffsetIfToLarge(WIDTH4) - if ((lenRest >>> BITS4) == 0) depth = 4 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 4) { - if (a == null) a = a4.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS3) & MASK - if (depth == 4) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a4 = a.asInstanceOf[Arr4] - shrinkOffsetIfToLarge(WIDTH3) - if ((lenRest >>> BITS3) == 0) depth = 3 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 3) { - if (a == null) a = a3.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS2) & MASK - if (depth == 3) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a3 = a.asInstanceOf[Arr3] - shrinkOffsetIfToLarge(WIDTH2) - if ((lenRest >>> BITS2) == 0) depth = 2 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 2) { - if (a == null) a = a2.asInstanceOf[Array[AnyRef]] - val i = (offset >>> BITS) & MASK - if (depth == 2) { - if (i > 0) System.arraycopy(a, 
i, a, 0, WIDTH - i) - a2 = a.asInstanceOf[Arr2] - shrinkOffsetIfToLarge(WIDTH) - if ((lenRest >>> BITS) == 0) depth = 1 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - aParent = a - a = a(0).asInstanceOf[Array[AnyRef]] - } - if (depth >= 1) { - if (a == null) a = a1.asInstanceOf[Array[AnyRef]] - val i = offset & MASK - if (depth == 1) { - if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) - a1 = a.asInstanceOf[Arr1] - len1 -= offset - offset = 0 - } else { - if (i > 0) a = copyOfRange(a, i, WIDTH) - aParent(0) = a - } - } - prefixIsRightAligned = false - } - - def addOne(elem: A): this.type = { - if(len1 == WIDTH) advance() - a1(len1) = elem.asInstanceOf[AnyRef] - len1 += 1 - this - } - - private[this] def addArr1(data: Arr1): Unit = { - val dl = data.length - if(dl > 0) { - if(len1 == WIDTH) advance() - val copy1 = mmin(WIDTH-len1, dl) - val copy2 = dl - copy1 - System.arraycopy(data, 0, a1, len1, copy1) - len1 += copy1 - if(copy2 > 0) { - advance() - System.arraycopy(data, copy1, a1, 0, copy2) - len1 += copy2 - } - } - } - - private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { -// assert(dim >= 2) -// assert(lenRest % WIDTH == 0) -// assert(len1 == 0 || len1 == WIDTH) - if (slice.isEmpty) return - if (len1 == WIDTH) advance() - val sl = slice.length - (dim: @switch) match { - case 2 => - // lenRest is always a multiple of WIDTH - val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS) & MASK - System.arraycopy(slice, 0, a2, destPos, copy1) - advanceN(WIDTH * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a2, 0, copy2) - advanceN(WIDTH * copy2) - } - case 3 => - if (lenRest % WIDTH2 != 0) { - // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) - return - } - val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) - val copy2 = sl - copy1 
- val destPos = (lenRest >>> BITS2) & MASK - System.arraycopy(slice, 0, a3, destPos, copy1) - advanceN(WIDTH2 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a3, 0, copy2) - advanceN(WIDTH2 * copy2) - } - case 4 => - if (lenRest % WIDTH3 != 0) { - // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) - return - } - val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS3) & MASK - System.arraycopy(slice, 0, a4, destPos, copy1) - advanceN(WIDTH3 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a4, 0, copy2) - advanceN(WIDTH3 * copy2) - } - case 5 => - if (lenRest % WIDTH4 != 0) { - // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) - return - } - val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) - val copy2 = sl - copy1 - val destPos = (lenRest >>> BITS4) & MASK - System.arraycopy(slice, 0, a5, destPos, copy1) - advanceN(WIDTH4 * copy1) - if (copy2 > 0) { - System.arraycopy(slice, copy1, a5, 0, copy2) - advanceN(WIDTH4 * copy2) - } - case 6 => // note width is now LASTWIDTH - if (lenRest % WIDTH5 != 0) { - // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions - slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) - return - } - val copy1 = sl - // there is no copy2 because there can't be another a6 to copy to - val destPos = lenRest >>> BITS5 - if (destPos + copy1 > LASTWIDTH) - throw new IllegalArgumentException("exceeding 2^31 elements") - System.arraycopy(slice, 0, a6, destPos, copy1) - advanceN(WIDTH5 * copy1) - } - } - - private[this] def addVector(xs: Vector[A]): this.type = { - val sliceCount = xs.vectorSliceCount - var sliceIdx = 0 - while(sliceIdx < sliceCount) { - val slice = 
xs.vectorSlice(sliceIdx) - vectorSliceDim(sliceCount, sliceIdx) match { - case 1 => addArr1(slice.asInstanceOf[Arr1]) - case n if len1 == WIDTH || len1 == 0 => - addArrN(slice.asInstanceOf[Array[AnyRef]], n) - case n => foreachRec(n-2, slice, addArr1) - } - sliceIdx += 1 - } - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = xs match { - case v: Vector[_] => - if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) - else addVector(v.asInstanceOf[Vector[A]]) - case _ => - super.addAll(xs) - } - - private[this] def advance(): Unit = { - val idx = lenRest + WIDTH - val xor = idx ^ lenRest - lenRest = idx - len1 = 0 - advance1(idx, xor) - } - - private[this] def advanceN(n: Int): Unit = if (n > 0) { - // assert(n % 32 == 0) - val idx = lenRest + n - val xor = idx ^ lenRest - lenRest = idx - len1 = 0 - advance1(idx, xor) - } - - private[this] def advance1(idx: Int, xor: Int): Unit = { - if (xor <= 0) { // level = 6 or something very unexpected happened - throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") - } else if (xor < WIDTH2) { // level = 1 - if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } - a1 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - } else if (xor < WIDTH3) { // level = 2 - if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - } else if (xor < WIDTH4) { // level = 3 - if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - } else if (xor < WIDTH5) { // level = 4 - if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a4 = new Array(WIDTH) - 
a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - a5((idx >>> BITS4) & MASK) = a4 - } else { // level = 5 - if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } - a1 = new Array(WIDTH) - a2 = new Array(WIDTH) - a3 = new Array(WIDTH) - a4 = new Array(WIDTH) - a5 = new Array(WIDTH) - a2((idx >>> BITS) & MASK) = a1 - a3((idx >>> BITS2) & MASK) = a2 - a4((idx >>> BITS3) & MASK) = a3 - a5((idx >>> BITS4) & MASK) = a4 - a6(idx >>> BITS5) = a5 - } - } - - def result(): Vector[A] = { - if (prefixIsRightAligned) leftAlignPrefix() - val len = len1 + lenRest - val realLen = len - offset - if(realLen == 0) Vector.empty - else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") - else if(len <= WIDTH) { - new Vector1(copyIfDifferentSize(a1, realLen)) - } else if(len <= WIDTH2) { - val i1 = (len-1) & MASK - val i2 = (len-1) >>> BITS - val data = copyOfRange(a2, 1, i2) - val prefix1 = a2(0) - val suffix1 = copyIfDifferentSize(a2(i2), i1+1) - new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) - } else if(len <= WIDTH3) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) - val data = copyOfRange(a3, 1, i3) - val prefix2 = copyTail(a3(0)) - val prefix1 = a3(0)(0) - val suffix2 = copyOf(a3(i3), i2) - val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) - } else if(len <= WIDTH4) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) - val data = copyOfRange(a4, 1, i4) - val prefix3 = copyTail(a4(0)) - val prefix2 = copyTail(a4(0)(0)) - val prefix1 = a4(0)(0)(0) - val suffix3 = copyOf(a4(i4), i3) - val suffix2 = copyOf(a4(i4)(i3), i2) - val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) - val len1 = 
prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) - } else if(len <= WIDTH5) { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) & MASK - val i5 = ((len-1) >>> BITS4) - val data = copyOfRange(a5, 1, i5) - val prefix4 = copyTail(a5(0)) - val prefix3 = copyTail(a5(0)(0)) - val prefix2 = copyTail(a5(0)(0)(0)) - val prefix1 = a5(0)(0)(0)(0) - val suffix4 = copyOf(a5(i5), i4) - val suffix3 = copyOf(a5(i5)(i4), i3) - val suffix2 = copyOf(a5(i5)(i4)(i3), i2) - val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - val len1234 = len123 + prefix4.length*WIDTH3 - new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) - } else { - val i1 = (len-1) & MASK - val i2 = ((len-1) >>> BITS) & MASK - val i3 = ((len-1) >>> BITS2) & MASK - val i4 = ((len-1) >>> BITS3) & MASK - val i5 = ((len-1) >>> BITS4) & MASK - val i6 = ((len-1) >>> BITS5) - val data = copyOfRange(a6, 1, i6) - val prefix5 = copyTail(a6(0)) - val prefix4 = copyTail(a6(0)(0)) - val prefix3 = copyTail(a6(0)(0)(0)) - val prefix2 = copyTail(a6(0)(0)(0)(0)) - val prefix1 = a6(0)(0)(0)(0)(0) - val suffix5 = copyOf(a6(i6), i5) - val suffix4 = copyOf(a6(i6)(i5), i4) - val suffix3 = copyOf(a6(i6)(i5)(i4), i3) - val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) - val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) - val len1 = prefix1.length - val len12 = len1 + prefix2.length*WIDTH - val len123 = len12 + prefix3.length*WIDTH2 - val len1234 = len123 + prefix4.length*WIDTH3 - val len12345 = len1234 + prefix5.length*WIDTH4 - new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, 
len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) - } - } - - override def toString: String = - s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" - - private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( - a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], - a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] - ).asInstanceOf[Array[Array[_]]] -} - - -/** Compile-time definitions for Vector. No references to this object should appear in bytecode. */ -private[immutable] object VectorInline { - // compile-time numeric constants - final val BITS = 5 - final val WIDTH = 1 << BITS - final val MASK = WIDTH - 1 - final val BITS2 = BITS * 2 - final val WIDTH2 = 1 << BITS2 - final val BITS3 = BITS * 3 - final val WIDTH3 = 1 << BITS3 - final val BITS4 = BITS * 4 - final val WIDTH4 = 1 << BITS4 - final val BITS5 = BITS * 5 - final val WIDTH5 = 1 << BITS5 - final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: - final val Log2ConcatFaster = 5 - final val AlignToFaster = 64 - - type Arr1 = Array[AnyRef] - type Arr2 = Array[Array[AnyRef]] - type Arr3 = Array[Array[Array[AnyRef]]] - type Arr4 = Array[Array[Array[Array[AnyRef]]]] - type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] - type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] - - /** Dimension of the slice at index */ - @inline def vectorSliceDim(count: Int, idx: Int): Int = { - val c = count/2 - c+1-abs(idx-c) - } - - @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = - if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) - - @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) - - @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) - - @inline final def 
copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = - if(a.length == len) a else copyOf[T](a, len) - - @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } - @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } - @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } - @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } - @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } - - @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { - val a1c = a1.clone() - a1c(idx1) = elem.asInstanceOf[AnyRef] - a1c - } - - @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { - val a2c = a2.clone() - a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) - a2c - } - - @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { - val a3c = a3.clone() - a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) - a3c - } - - @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { - val a4c = a4.clone() - a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) - a4c - } - - @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { - val a5c = a5.clone() - a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) - a5c - } - - @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { - val a6c = a6.clone() - a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) - a6c - } - - @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { - val dest = copyOf[T](a, a.length+b.length) - System.arraycopy(b, 0, dest, a.length, b.length) - dest - } -} - - -/** Helper methods and constants for Vector. 
*/ -private object VectorStatics { - - final def copyAppend1(a: Arr1, elem: Any): Arr1 = { - val alen = a.length - val ac = new Arr1(alen+1) - System.arraycopy(a, 0, ac, 0, alen) - ac(alen) = elem.asInstanceOf[AnyRef] - ac - } - - final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { - val ac = copyOf(a, a.length+1) - ac(ac.length-1) = elem - ac - } - - final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { - val ac = new Arr1(a.length+1) - System.arraycopy(a, 0, ac, 1, a.length) - ac(0) = elem.asInstanceOf[AnyRef] - ac - } - - final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { - val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] - System.arraycopy(a, 0, ac, 1, a.length) - ac(0) = elem - ac - } - - final val empty1: Arr1 = new Array(0) - final val empty2: Arr2 = new Array(0) - final val empty3: Arr3 = new Array(0) - final val empty4: Arr4 = new Array(0) - final val empty5: Arr5 = new Array(0) - final val empty6: Arr6 = new Array(0) - - final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { - var i = 0 - val len = a.length - if(level == 0) { - while(i < len) { - f(a(i).asInstanceOf[A]) - i += 1 - } - } else { - val l = level-1 - while(i < len) { - foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) - i += 1 - } - } - } - - final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { - var i = 0 - while(i < a.length) { - val v1 = a(i).asInstanceOf[AnyRef] - val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] - if(v1 ne v2) - return mapElems1Rest(a, f, i, v2) - i += 1 - } - a - } - - final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { - val ac = new Arr1(a.length) - if(at > 0) System.arraycopy(a, 0, ac, 0, at) - ac(at) = v2 - var i = at+1 - while(i < a.length) { - ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] - i += 1 - } - ac - } - - final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { 
- if(n == 1) - mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] - else { - var i = 0 - while(i < a.length) { - val v1 = a(i) - val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) - if(v1 ne v2) - return mapElemsRest(n, a, f, i, v2) - i += 1 - } - a - } - } - - final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { - val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] - if(at > 0) System.arraycopy(a, 0, ac, 0, at) - ac(at) = v2 - var i = at+1 - while(i < a.length) { - ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) - i += 1 - } - ac.asInstanceOf[Array[T]] - } - - final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { - case it: Iterable[_] => - if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { - it.size match { - case 0 => null - case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) - case s => - val prefix1b = new Arr1(prefix1.length + s) - System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) - it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) - prefix1b - } - } else null - case it => - val s = it.knownSize - if(s > 0 && s <= WIDTH-prefix1.length) { - val prefix1b = new Arr1(prefix1.length + s) - System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) - it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) - prefix1b - } else null - } - - final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { - case it: Iterable[_] => - if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { - it.size match { - case 0 => null - case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) - case s => - val suffix1b = copyOf(suffix1, suffix1.length + s) - it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) - suffix1b - } - } else null - case it => - val s = it.knownSize - if(s > 0 && s <= WIDTH-suffix1.length) { - val suffix1b = copyOf(suffix1, suffix1.length + 
s) - it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) - suffix1b - } else null - } -} - - -private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { - - private[this] var a1: Arr1 = v.prefix1 - private[this] var a2: Arr2 = _ - private[this] var a3: Arr3 = _ - private[this] var a4: Arr4 = _ - private[this] var a5: Arr5 = _ - private[this] var a6: Arr6 = _ - private[this] var a1len = a1.length - private[this] var i1 = 0 // current index in a1 - private[this] var oldPos = 0 - private[this] var len1 = totalLength // remaining length relative to a1 - - private[this] var sliceIdx = 0 - private[this] var sliceDim = 1 - private[this] var sliceStart = 0 // absolute position - private[this] var sliceEnd = a1len // absolute position - - //override def toString: String = - // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" - - @inline override def knownSize = len1 - i1 - - @inline def hasNext: Boolean = len1 > i1 - - def next(): A = { - if(i1 == a1len) advance() - val r = a1(i1) - i1 += 1 - r.asInstanceOf[A] - } - - private[this] def advanceSlice(): Unit = { - if(!hasNext) Iterator.empty.next() - sliceIdx += 1 - var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) - while(slice.length == 0) { - sliceIdx += 1 - slice = v.vectorSlice(sliceIdx) - } - sliceStart = sliceEnd - sliceDim = vectorSliceDim(sliceCount, sliceIdx) - (sliceDim: @switch) match { - case 1 => a1 = slice.asInstanceOf[Arr1] - case 2 => a2 = slice.asInstanceOf[Arr2] - case 3 => a3 = slice.asInstanceOf[Arr3] - case 4 => a4 = slice.asInstanceOf[Arr4] - case 5 => a5 = slice.asInstanceOf[Arr5] - case 6 => a6 = slice.asInstanceOf[Arr6] - } - sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) - if(sliceEnd > totalLength) sliceEnd = totalLength - if(sliceDim > 1) oldPos = (1 << 
(BITS*sliceDim))-1 - } - - private[this] def advance(): Unit = { - val pos = i1-len1+totalLength - if(pos == sliceEnd) advanceSlice() - if(sliceDim > 1) { - val io = pos - sliceStart - val xor = oldPos ^ io - advanceA(io, xor) - oldPos = io - } - len1 -= i1 - a1len = mmin(a1.length, len1) - i1 = 0 - } - - private[this] def advanceA(io: Int, xor: Int): Unit = { - if(xor < WIDTH2) { - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH3) { - a2 = a3((io >>> BITS2) & MASK) - a1 = a2(0) - } else if(xor < WIDTH4) { - a3 = a4((io >>> BITS3) & MASK) - a2 = a3(0) - a1 = a2(0) - } else if(xor < WIDTH5) { - a4 = a5((io >>> BITS4) & MASK) - a3 = a4(0) - a2 = a3(0) - a1 = a2(0) - } else { - a5 = a6(io >>> BITS5) - a4 = a5(0) - a3 = a4(0) - a2 = a3(0) - a1 = a2(0) - } - } - - private[this] def setA(io: Int, xor: Int): Unit = { - if(xor < WIDTH2) { - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH3) { - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH4) { - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } else if(xor < WIDTH5) { - a4 = a5((io >>> BITS4) & MASK) - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } else { - a5 = a6(io >>> BITS5) - a4 = a5((io >>> BITS4) & MASK) - a3 = a4((io >>> BITS3) & MASK) - a2 = a3((io >>> BITS2) & MASK) - a1 = a2((io >>> BITS) & MASK) - } - } - - override def drop(n: Int): Iterator[A] = { - if(n > 0) { - val oldpos = i1-len1+totalLength - val newpos = mmin(oldpos + n, totalLength) - if(newpos == totalLength) { - i1 = 0 - len1 = 0 - a1len = 0 - } else { - while(newpos >= sliceEnd) advanceSlice() - val io = newpos - sliceStart - if(sliceDim > 1) { - val xor = oldPos ^ io - setA(io, xor) - oldPos = io - } - a1len = a1.length - i1 = io & MASK - len1 = i1 + (totalLength-newpos) - if(a1len > len1) a1len = len1 - } - } - this - } - - override def take(n: Int): Iterator[A] = { - if(n < knownSize) { - val 
trunc = knownSize - mmax(0, n) - totalLength -= trunc - len1 -= trunc - if(len1 < a1len) a1len = len1 - if(totalLength < sliceEnd) sliceEnd = totalLength - } - this - } - - override def slice(from: Int, until: Int): Iterator[A] = { - val _until = - if(from > 0) { - drop(from) - until - from - } else until - take(_until) - } - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val xsLen = xs.length - val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) - var copied = 0 - val isBoxed = xs.isInstanceOf[Array[AnyRef]] - while(copied < total) { - if(i1 == a1len) advance() - val count = mmin(total-copied, a1.length-i1) - if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) - else Array.copy(a1, i1, xs, start+copied, count) - i1 += count - copied += count - } - total - } - - override def toVector: Vector[A] = - v.slice(i1-len1+totalLength, totalLength) - - protected[immutable] def split(at: Int): NewVectorIterator[A] = { - val it2 = clone().asInstanceOf[NewVectorIterator[A]] - it2.take(at) - drop(at) - it2 - } -} - - -private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) - extends Stepper[A] with EfficientSplit { - - protected[this] def build(it: NewVectorIterator[A]): Semi - - final def hasStep: Boolean = it.hasNext - - final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED - - final def estimateSize: Long = it.knownSize - - def trySplit(): Sub = { - val len = it.knownSize - if(len > 1) build(it.split(len >>> 1)) - else null - } - - override final def iterator: Iterator[A] = it -} - -private class AnyVectorStepper[A](it: NewVectorIterator[A]) - extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { - protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) - def nextStep(): A = it.next() -} - -private class DoubleVectorStepper(it: 
NewVectorIterator[Double]) - extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { - protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) - def nextStep(): Double = it.next() -} - -private class IntVectorStepper(it: NewVectorIterator[Int]) - extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { - protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) - def nextStep(): Int = it.next() -} - -private class LongVectorStepper(it: NewVectorIterator[Long]) - extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { - protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) - def nextStep(): Long = it.next() -} - - -// The following definitions are needed for binary compatibility with ParVector -private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { - private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _ - def hasNext: Boolean = it.hasNext - def next(): A = it.next() - private[collection] def remainingElementCount: Int = it.size - private[collection] def remainingVector: Vector[A] = it.toVector -} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala deleted file mode 100644 index 0860a0b47f28..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package immutable - -import scala.annotation.tailrec -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. - * - * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense - * of using extra memory and generally lower performance for other operations - * - * @tparam K the type of the keys contained in this vector map. - * @tparam V the type of the values associated with the keys in this vector map. - * - * @define coll immutable vector map - * @define Coll `immutable.VectorMap` - */ -final class VectorMap[K, +V] private ( - private[immutable] val fields: Vector[Any], - private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) - extends AbstractMap[K, V] - with SeqMap[K, V] - with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] - with MapFactoryDefaults[K, V, VectorMap, Iterable] { - - import VectorMap._ - - override protected[this] def className: String = "VectorMap" - - private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { - this(fields, underlying, 0) - } - - override val size = underlying.size - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { - underlying.get(key) match { - case Some((slot, _)) => - new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) - case None => - new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) - } - } - - override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = - new Map.WithDefault(this, d) - - override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = - new Map.WithDefault[K, V1](this, _ => d) - - def get(key: K): Option[V] = underlying.get(key) match { - case Some(v) => 
Some(v._2) - case None => None - } - - @tailrec - private def nextValidField(slot: Int): (Int, K) = { - if (slot >= fields.size) (-1, null.asInstanceOf[K]) - else fields(slot) match { - case Tombstone(distance) => - nextValidField(slot + distance) - case k => - (slot, k.asInstanceOf[K]) - } - } - - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] val fieldsLength = fields.length - private[this] var slot = -1 - private[this] var key: K = null.asInstanceOf[K] - - private[this] def advance(): Unit = { - val nextSlot = slot + 1 - if (nextSlot >= fieldsLength) { - slot = fieldsLength - key = null.asInstanceOf[K] - } else { - nextValidField(nextSlot) match { - case (-1, _) => - slot = fieldsLength - key = null.asInstanceOf[K] - case (s, k) => - slot = s - key = k - } - } - } - - advance() - - override def hasNext: Boolean = slot < fieldsLength - - override def next(): (K, V) = { - if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") - val result = (key, underlying(key)._2) - advance() - result - } - } - - // No-Op overrides to allow for more efficient steppers in a minor release. - // Refining the return type to `S with EfficientSplit` is binary compatible. 
- - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) - - - def removed(key: K): VectorMap[K, V] = { - if (isEmpty) empty - else { - var fs = fields - val sz = fs.size - underlying.get(key) match { - case Some(_) if size == 1 => empty - case Some((slot, _)) => - val s = slot - dropped - - // Calculate next of kin - val next = - if (s < sz - 1) fs(s + 1) match { - case Tombstone(d) => s + d + 1 - case _ => s + 1 - } else s + 1 - - fs = fs.updated(s, Tombstone(next - s)) - - // Calculate first index of preceding tombstone sequence - val first = - if (s > 0) { - fs(s - 1) match { - case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 - case Tombstone(d) if d == 1 => s - 1 - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case _ => s - } - }else s - fs = fs.updated(first, Tombstone(next - first)) - - // Calculate last index of succeeding tombstone sequence - val last = next - 1 - if (last != first) { - fs = fs.updated(last, Tombstone(first - 1 - last)) - } - new VectorMap(fs, underlying - key, dropped) - case _ => - this - } - } - } - - override def mapFactory: MapFactory[VectorMap] = VectorMap - - override def contains(key: K): Boolean = underlying.contains(key) - - override def head: (K, V) = iterator.next() - - override def last: (K, V) = { - if (isEmpty) throw new UnsupportedOperationException("empty.last") - val lastSlot = fields.length - 1 - val last = fields.last match { - case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] - case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case k => k.asInstanceOf[K] 
- } - (last, underlying(last)._2) - } - - override def lastOption: Option[(K, V)] = { - if (isEmpty) None - else Some(last) - } - - override def tail: VectorMap[K, V] = { - if (isEmpty) throw new UnsupportedOperationException("empty.tail") - val (slot, key) = nextValidField(0) - new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) - } - - override def init: VectorMap[K, V] = { - if (isEmpty) throw new UnsupportedOperationException("empty.init") - val lastSlot = fields.size - 1 - val (slot, key) = fields.last match { - case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) - case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) - case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) - case k => (lastSlot, k.asInstanceOf[K]) - } - new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) - } - - override def keys: Vector[K] = keysIterator.toVector - - override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { - override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) - } -} - -object VectorMap extends MapFactory[VectorMap] { - //Class to mark deleted slots in 'fields'. - //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' - // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). - //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' - // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. - //For other deleted slots, it simply indicates that they have been deleted. 
- private[VectorMap] final case class Tombstone(distance: Int) - - private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = - new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) - - def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] - - def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = - it match { - case vm: VectorMap[K, V] => vm - case _ => (newBuilder[K, V] ++= it).result() - } - - def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] -} - -private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { - private[this] val vectorBuilder = new VectorBuilder[K] - private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] - private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private - - override def clear(): Unit = { - vectorBuilder.clear() - mapBuilder.clear() - aliased = null - } - - override def result(): VectorMap[K, V] = { - if (aliased eq null) { - aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) - } - aliased - } - def addOne(key: K, value: V): this.type = { - if (aliased ne null) { - aliased = aliased.updated(key, value) - } else { - mapBuilder.getOrElse(key, null) match { - case (slot, _) => - mapBuilder.addOne(key, (slot, value)) - case null => - val vectorSize = vectorBuilder.size - vectorBuilder.addOne(key) - mapBuilder.addOne(key, (vectorSize, value)) - } - } - this - } - - override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) -} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala deleted file mode 100644 index 47fe769c81ef..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright 
EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package immutable - -import scala.Predef.{wrapString => _, assert} -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl.CharStringStepper -import scala.collection.mutable.{Builder, StringBuilder} -import language.experimental.captureChecking - -/** - * This class serves as a wrapper augmenting `String`s with all the operations - * found in indexed sequences. - * - * The difference between this class and `StringOps` is that calling transformer - * methods such as `filter` and `map` will yield an object of type `WrappedString` - * rather than a `String`. - * - * @param self a string contained within this wrapped string - * - * @define Coll `WrappedString` - * @define coll wrapped string - */ -@SerialVersionUID(3L) -final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] - with IndexedSeqOps[Char, IndexedSeq, WrappedString] - with Serializable - with Pure { - - def apply(i: Int): Char = self.charAt(i) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder - override def empty: WrappedString = WrappedString.empty - - override def slice(from: Int, until: Int): WrappedString = { - val start = if (from < 0) 0 else from - if (until <= start || start >= self.length) - return WrappedString.empty - - val end = if (until > length) length else until - new WrappedString(self.substring(start, end)) - } - override def length = self.length - override def toString = self - override def view: StringView = new StringView(self) - - override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { - val st = new CharStringStepper(self, 0, self.length) - val r = - if (shape.shape == StepperShape.CharShape) st - else { - assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") - AnyStepper.ofParIntStepper(st) - } - r.asInstanceOf[S with EfficientSplit] - } - - override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = - that match { - case s: WrappedString => self.startsWith(s.self, offset) - case _ => super.startsWith(that, offset) - } - - override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = - that match { - case s: WrappedString => self.endsWith(s.self) - case _ => super.endsWith(that) - } - - override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { - case c: Char => self.indexOf(c, from) - case _ => super.indexOf(elem, from) - } - - override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = - elem match { - case c: Char => self.lastIndexOf(c, end) - case _ => super.lastIndexOf(elem, end) - } - - override def copyToArray[sealed B >: Char](xs: Array[B], start: Int, len: Int): Int = - (xs: Any) match { - case chs: Array[Char] => - val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) - self.getChars(0, copied, chs, start) - copied - case _ => super.copyToArray(xs, start, len) - } - - override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] = - suffix match { - case s: WrappedString => new WrappedString(self concat s.self) - case _ => super.appendedAll(suffix) - } - - override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { - case s: WrappedString => self == s.self - case _ => super.sameElements(o) - } - - override protected[this] def className = "WrappedString" - - override protected final def applyPreferredMaxLength: Int = Int.MaxValue - override def equals(other: Any): Boolean = other match { - case that: 
WrappedString => - this.self == that.self - case _ => - super.equals(other) - } -} - -/** A companion object for wrapped strings. - */ -@SerialVersionUID(3L) -object WrappedString extends SpecificIterableFactory[Char, WrappedString] { - def fromSpecific(it: IterableOnce[Char]^): WrappedString = { - val b = newBuilder - val s = it.knownSize - if(s >= 0) b.sizeHint(s) - b ++= it - b.result() - } - val empty: WrappedString = new WrappedString("") - def newBuilder: Builder[Char, WrappedString] = - new StringBuilder().mapResult(x => new WrappedString(x)) - - implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { - def unwrap: String = value.self - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala deleted file mode 100644 index 985ef22859be..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/package.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -import language.experimental.captureChecking - -package object immutable { - type StringOps = scala.collection.StringOps - val StringOps = scala.collection.StringOps - type StringView = scala.collection.StringView - val StringView = scala.collection.StringView - - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[+X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - - @deprecated("Use Map instead of DefaultMap", "2.13.0") - type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] -} diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala deleted file mode 100644 index a6413649e219..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala +++ /dev/null @@ -1,603 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializationProxy -import scala.language.implicitConversions -import language.experimental.captureChecking - - -/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically significantly faster with `AnyRefMap` than [[HashMap]]. - * Note that numbers and characters are not handled specially in AnyRefMap; - * only plain `equals` and `hashCode` are used in comparisons. - * - * Methods that traverse or regenerate the map, including `foreach` and `map`, - * are not in general faster than with `HashMap`. 
The methods `foreachKey`, - * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster - * than alternative ways to achieve the same functionality. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. Although `AnyRefMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29^ entries (approximately - * 500 million). The maximum capacity is 2^30^, but performance will degrade - * rapidly as 2^30^ is approached. - * - */ -class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) - extends AbstractMap[K, V] - with MapOps[K, V, Map, AnyRefMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] - with Serializable { - - import AnyRefMap._ - def this() = this(AnyRefMap.exceptionDefault, 16, true) - - /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) - - /** Creates a new `AnyRefMap` with an initial buffer of specified size. - * - * An `AnyRefMap` can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _hashes: Array[Int] = null - private[this] var _keys: Array[AnyRef] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int): Unit = { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] - ): Unit = { - mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz - } - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { - var sz = coll.knownSize - if(sz < 0) sz = 4 - val arm = new AnyRefMap[K, V](sz * 2) - coll.iterator.foreach{ case (k,v) => arm(k) = v } - if (arm.size < (sz>>3)) arm.repack() - arm - } - override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder - - override def size: Int = _size - override def knownSize: Int = size - override def isEmpty: Boolean = _size == 0 - override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def hashOf(key: K): Int = { - // Note: this method must not return 0 or Int.MinValue, as these indicate no element - if (key eq null) 0x41081989 - else { - val h = key.hashCode - // Part of the MurmurHash3 32 bit finalizer - val i = (h ^ (h >>> 16)) * 0x85EBCA6B - val j = (i ^ (i >>> 13)) & 0x7FFFFFFF - if (j==0) 0x41081989 else j - } - } - - private def seekEntry(h: Int, k: AnyRef): Int = { - var e = h & mask - 
var x = 0 - var g = 0 - val hashes = _hashes - val keys = _keys - while ({ g = hashes(e); g != 0}) { - if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - e | MissingBit - } - - @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { - var e = h & mask - var x = 0 - var g = 0 - var o = -1 - while ({ g = _hashes(e); g != 0}) { - if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e - else if (o == -1 && g+g == 0) o = e - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - if (o >= 0) o | MissVacant else e | MissingBit - } - - override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 - - override def get(key: K): Option[V] = { - val i = seekEntry(hashOf(key), key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val i = seekEntry(hashOf(key), key) - if (i < 0) default else _values(i).asInstanceOf[V] - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - val h = hashOf(key) - var i = seekEntryOrOpen(h, key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) - val value = { - val oh = _hashes - val ans = defaultValue - if (oh ne _hashes) { - i = seekEntryOrOpen(h, key) - if (i >= 0) _size -= 1 - } - ans - } - _size += 1 - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key.asInstanceOf[AnyRef] - _values(j) = value.asInstanceOf[AnyRef] - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - value - } - else _values(i).asInstanceOf[V] - } - - /** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). 
- * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ - def getOrNull(key: K): V = { - val i = seekEntry(hashOf(key), key) - (if (i < 0) null else _values(i)).asInstanceOf[V] - } - - /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead; an exception will be thrown if no - * `defaultEntry` was supplied. - */ - override def apply(key: K): V = { - val i = seekEntry(hashOf(key), key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] - } - - /** Defers to defaultEntry to find a default value for the key. Throws an - * exception if no other default behavior was specified. - */ - override def default(key: K): V = defaultEntry(key) - - private def repack(newMask: Int): Unit = { - val oh = _hashes - val ok = _keys - val ov = _values - mask = newMask - _hashes = new Array[Int](mask+1) - _keys = new Array[AnyRef](mask+1) - _values = new Array[AnyRef](mask+1) - _vacant = 0 - var i = 0 - while (i < oh.length) { - val h = oh(i) - if (h+h != 0) { - var e = h & mask - var x = 0 - while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - _hashes(e) = h - _keys(e) = ok(i) - _values(e) = ov(i) - } - i += 1 - } - } - - /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. - * - * For maps that undergo a complex creation process with both addition and - * removal of keys, and then are used heavily with no further removal of - * elements, calling `repack` after the end of the creation can result in - * improved performance. Repacking takes time proportional to the number - * of entries in the map. 
- */ - def repack(): Unit = { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } - - override def put(key: K, value: V): Option[V] = { - val h = hashOf(key) - val i = seekEntryOrOpen(h, key) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - None - } - else { - val ans = Some(_values(i).asInstanceOf[V]) - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] - ans - } - } - - /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to an `AnyRefMap`. - */ - override def update(key: K, value: V): Unit = { - val h = hashOf(key) - val i = seekEntryOrOpen(h, key) - if (i < 0) { - val j = i & IndexMask - _hashes(j) = h - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _hashes(i) = h - _values(i) = value.asInstanceOf[AnyRef] - } - } - - /** Adds a new key/value pair to this map and returns the map. */ - @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") - def +=(key: K, value: V): this.type = { update(key, value); this } - - /** Adds a new key/value pair to this map and returns the map. 
*/ - @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } - - @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } - - def subtractOne(key: K): this.type = { - val i = seekEntry(hashOf(key), key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _hashes(i) = Int.MinValue - _keys(i) = null - _values(i) = null - } - this - } - - def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { - protected def nextResult(k: K, v: V) = (k, v) - } - override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { - protected def nextResult(k: K, v: V) = k - } - override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { - protected def nextResult(k: K, v: V) = v - } - - private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { - private[this] val hz = _hashes - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var index = 0 - - def hasNext: Boolean = index= hz.length) return false - h = hz(index) - } - true - } - - def next(): A = { - if (hasNext) { - val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) - index += 1 - ans - } - else throw new NoSuchElementException("next") - } - - protected def nextResult(k: K, v: V): A - } - - - override def foreach[U](f: ((K,V)) => U): Unit = { - var i = 0 - var e = _size - while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { - f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) - i += 1 - e -= 1 - } - else return - } - } - - override def foreachEntry[U](f: (K,V) => U): Unit = { - var i = 0 - var e = _size - while (e > 0) { - while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 - if (i < _hashes.length) { - f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) - i += 1 - e -= 1 - } - else return - } - } - - override def clone(): AnyRefMap[K, V] = { - val hz = 
java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val arm = new AnyRefMap[K, V](defaultEntry, 1, false) - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { - val m = this + elem1 + elem2 - if(elems.isEmpty) m else m.concat(elems) - } - - override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = { - val arm = clone().asInstanceOf[AnyRefMap[K, V2]] - xs.iterator.foreach(kv => arm += kv) - arm - } - - override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs) - - @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") - override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = - clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) - - private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - f(elems(i).asInstanceOf[A]) - } - i += 1 - } - } - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) - - /** Creates a new `AnyRefMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately. 
- */ - def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = { - val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) - val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - arm.initializeTo(mask, _size, _vacant, hz, kz, vz) - arm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") - @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValuesInPlace(f: V => V): this.type = { - var i,j = 0 - while (i < _hashes.length & j < _size) { - val h = _hashes(i) - if (h+h != 0) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } - - // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) - def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.Map(this, f)) - def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.FlatMap(this, f)) - def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) - - override def clear(): Unit = { - import java.util.Arrays.fill - fill(_keys, null) - fill(_values, null) - 
fill(_hashes, 0) - _size = 0 - _vacant = 0 - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "AnyRefMap" -} - -object AnyRefMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - private class ExceptionDefault extends (Any -> Nothing) with Serializable { - def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) - } - private val exceptionDefault = new ExceptionDefault - - /** A builder for instances of `AnyRefMap`. - * - * This builder can be reused to create multiple instances. - */ - final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { - private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] - def addOne(entry: (K, V)): this.type = { - elems += entry - this - } - def clear(): Unit = elems = new AnyRefMap[K, V] - def result(): AnyRefMap[K, V] = elems - override def knownSize: Int = elems.knownSize - } - - /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ - def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) - - def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] - - private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { - var sz = elems.knownSize - if(sz < 0) sz = 4 - val arm = new AnyRefMap[K, V](sz * 2) - elems.iterator.foreach{ case (k,v) => arm(k) = v } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new empty `AnyRefMap`. 
*/ - def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V] - - /** Creates a new empty `AnyRefMap` with the supplied default */ - def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) - - /** Creates a new `AnyRefMap` from an existing source collection. A source collection - * which is already an `AnyRefMap` gets cloned. - * - * @param source Source collection - * @tparam K the type of the keys - * @tparam V the type of the values - * @return a new `AnyRefMap` with the elements of `source` - */ - def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { - case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] - case _ => buildFromIterableOnce(source) - } - - /** Creates a new `AnyRefMap` from arrays of keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. - */ - def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { - val sz = math.min(keys.length, values.length) - val arm = new AnyRefMap[K, V](sz * 2) - var i = 0 - while (i < sz) { arm(keys(i)) = values(i); i += 1 } - if (arm.size < (sz>>3)) arm.repack() - arm - } - - /** Creates a new `AnyRefMap` from keys and values. - * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
- */ - def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { - val sz = math.min(keys.size, values.size) - val arm = new AnyRefMap[K, V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() - if (arm.size < (sz >> 3)) arm.repack() - arm - } - - implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) - def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] - } - - implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] - private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) - def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] - } - - implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) - implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala deleted file mode 100644 index 8fa1e6edd566..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ /dev/null @@ -1,406 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import java.util.Arrays - -import scala.annotation.nowarn -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures - -/** An implementation of the `Buffer` class using an array to - * represent the assembled sequence internally. Append, update and random - * access take constant time (amortized time). Prepends and removes are - * linear in the buffer size. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] - * section on `Array Buffers` for more information. - - * - * @tparam A the type of this arraybuffer's elements. 
- * - * @define Coll `mutable.ArrayBuffer` - * @define coll array buffer - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(-1582447879429021880L) -class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int) - extends AbstractBuffer[A] - with IndexedBuffer[A] - with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] - with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] - with IterableFactoryDefaults[A, ArrayBuffer] - with DefaultSerializable { - - def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) - - def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) - - @transient private[this] var mutationCount: Int = 0 - - // needs to be `private[collection]` or `protected[collection]` for parallel-collections - protected[collection] var array: Array[AnyRef] = initialElements - protected var size0 = initialSize - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) - } - - override def knownSize: Int = super[IndexedSeqOps].knownSize - - /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int): Unit = { - array = ArrayBuffer.ensureSize(array, size0, n) - } - - // TODO 3.T: should be `protected`, perhaps `protected[this]` - /** Ensure that the internal array has at least `n` additional cells more than `size0`. 
*/ - private[mutable] def ensureAdditionalSize(n: Int): Unit = { - // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow - array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) - } - - def sizeHint(size: Int): Unit = - if(size > length && size >= 1) ensureSize(size) - - /** Reduce length to `n`, nulling out all dropped elements */ - private def reduceToSize(n: Int): Unit = { - mutationCount += 1 - Arrays.fill(array, n, size0, null) - size0 = n - } - - /** Trims the ArrayBuffer to an appropriate size for the current - * number of elements (rounding up to the next natural size), - * which may replace the array by a shorter one. - * This allows releasing some unused memory. - */ - def trimToSize(): Unit = { - resize(length) - } - - /** Trims the `array` buffer size down to either a power of 2 - * or Int.MaxValue while keeping first `requiredLength` elements. - */ - private def resize(requiredLength: Int): Unit = - array = ArrayBuffer.downsize(array, requiredLength) - - @inline private def checkWithinBounds(lo: Int, hi: Int) = { - if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") - if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") - } - - def apply(n: Int): A = { - checkWithinBounds(n, n + 1) - array(n).asInstanceOf[A] - } - - def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { - checkWithinBounds(index, index + 1) - mutationCount += 1 - array(index) = elem.asInstanceOf[AnyRef] - } - - def length = size0 - - // TODO: return `IndexedSeqView` rather than `ArrayBufferView` - override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) - - override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer - - /** Note: This does not actually resize the internal representation. 
- * See clearAndShrink if you want to also resize internally - */ - def clear(): Unit = reduceToSize(0) - - /** - * Clears this buffer and shrinks to @param size (rounding up to the next - * natural size) - * @param size - */ - def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { - clear() - resize(size) - this - } - - def addOne(elem: A): this.type = { - mutationCount += 1 - ensureAdditionalSize(1) - val oldSize = size0 - size0 = oldSize + 1 - this(oldSize) = elem - this - } - - // Overridden to use array copying for efficiency where possible. - override def addAll(elems: IterableOnce[A]^): this.type = { - elems match { - case elems: ArrayBuffer[_] => - val elemsLength = elems.size0 - if (elemsLength > 0) { - mutationCount += 1 - ensureAdditionalSize(elemsLength) - Array.copy(elems.array, 0, array, length, elemsLength) - size0 = length + elemsLength - } - case _ => super.addAll(elems) - } - this - } - - def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { - checkWithinBounds(index, index) - mutationCount += 1 - ensureAdditionalSize(1) - Array.copy(array, index, array, index + 1, size0 - index) - size0 += 1 - this(index) = elem - } - - def prepend(elem: A): this.type = { - insert(0, elem) - this - } - - def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { - checkWithinBounds(index, index) - elems match { - case elems: collection.Iterable[A] => - val elemsLength = elems.size - if (elemsLength > 0) { - mutationCount += 1 - ensureAdditionalSize(elemsLength) - val len = size0 - Array.copy(array, index, array, index + elemsLength, len - index) - // if `elems eq this`, this copy is safe because - // - `elems.array eq this.array` - // - we didn't overwrite the values being inserted after moving them in - // the previous line - // - `copyElemsToArray` will call `System.arraycopy` - // - `System.arraycopy` will effectively "read" all the values before - // overwriting any of them when two 
arrays are the the same reference - val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) - if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength") - size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy - } - case _ => insertAll(index, ArrayBuffer.from(elems)) - } - } - - /** Note: This does not actually resize the internal representation. - * See trimToSize if you want to also resize internally - */ - def remove(@deprecatedName("n", "2.13.0") index: Int): A = { - checkWithinBounds(index, index + 1) - val res = this(index) - Array.copy(array, index + 1, array, index, size0 - (index + 1)) - reduceToSize(size0 - 1) - res - } - - /** Note: This does not actually resize the internal representation. - * See trimToSize if you want to also resize internally - */ - def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = - if (count > 0) { - checkWithinBounds(index, index + count) - Array.copy(array, index + count, array, index, size0 - (index + count)) - reduceToSize(size0 - count) - } else if (count < 0) { - throw new IllegalArgumentException("removing negative number of elements: " + count) - } - - @deprecated("Use 'this' instance instead", "2.13.0") - @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def result(): this.type = this - - @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") - @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "ArrayBuffer" - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - 
val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(array, 0, xs, start, copied) - } - copied - } - - /** Sorts this $coll in place according to an Ordering. - * - * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] - * @param ord the ordering to be used to compare elements. - * @return modified input $coll sorted according to the ordering `ord`. - */ - override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) { - mutationCount += 1 - scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length) - } - this - } - - @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = - if (start == end) z - else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) - - @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = - if (start == end) z - else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) - - override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) - - override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) - - override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) - - override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) -} - -/** - * Factory object for the `ArrayBuffer` class. 
- * - * $factoryInfo - * - * @define coll array buffer - * @define Coll `mutable.ArrayBuffer` - */ -@SerialVersionUID(3L) -object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { - final val DefaultInitialSize = 16 - private[this] val emptyArray = new Array[AnyRef](0) - - def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { - val k = coll.knownSize - if (k >= 0) { - // Avoid reallocation of buffer if length is known - val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit - val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) - if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") - new ArrayBuffer[B](array, k) - } - else new ArrayBuffer[B] ++= coll - } - - def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] = - new GrowableBuilder[A, ArrayBuffer[A]](empty) { - override def sizeHint(size: Int): Unit = elems.ensureSize(size) - } - - def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]() - - /** - * @param arrayLen the length of the backing array - * @param targetLen the minimum length to resize up to - * @return -1 if no resizing is needed, or the size for the new array otherwise - */ - private def resizeUp(arrayLen: Long, targetLen: Long): Int = { - if (targetLen <= arrayLen) -1 - else { - if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") - IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` - - val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) - math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt - } - } - // if necessary, copy (curSize elements of) the array to a new array of capacity n. - // Should use Array.copyOf(array, resizeEnsuring(array.length))? 
- private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { - val newLen = resizeUp(array.length, targetSize) - if (newLen < 0) array - else { - val res = new Array[AnyRef](newLen) - System.arraycopy(array, 0, res, 0, curSize) - res - } - } - - /** - * @param arrayLen the length of the backing array - * @param targetLen the length to resize down to, if smaller than `arrayLen` - * @return -1 if no resizing is needed, or the size for the new array otherwise - */ - private def resizeDown(arrayLen: Int, targetLen: Int): Int = - if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) - private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { - val newLen = resizeDown(array.length, targetSize) - if (newLen < 0) array - else if (newLen == 0) emptyArray - else { - val res = new Array[AnyRef](newLen) - System.arraycopy(array, 0, res, 0, targetSize) - res - } - } -} - -// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` -final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) - extends AbstractIndexedSeqView[A], Pure { - /* Removed since it poses problems for capture checking - @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") - def this(array: Array[AnyRef], length: Int) = { - // this won't actually track mutation, but it would be a pain to have the implementation - // check if we have a method to get the current mutation count or not on every method and - // change what it does based on that. hopefully no one ever calls this. 
- this({ - val _array: Array[Object] = array - val _length = length - new ArrayBuffer[A](0) { - this.array = _array - this.size0 = _length - }: ArrayBuffer[A] - }, () => 0) - }*/ - - @deprecated("never intended to be public", since = "2.13.7") - def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] - - @throws[IndexOutOfBoundsException] - def apply(n: Int): A = underlying(n) - def length: Int = underlying.length - override protected[this] def className = "ArrayBufferView" - - // we could inherit all these from `CheckedIndexedSeqView`, except this class is public - override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def 
concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala deleted file mode 100644 index 0620d3d23061..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala +++ /dev/null @@ -1,523 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import language.experimental.captureChecking -import scala.reflect.ClassTag - -/** A builder class for arrays. - * - * @tparam T the type of the elements for the builder. 
- */ -@SerialVersionUID(3L) -sealed abstract class ArrayBuilder[sealed T] - extends ReusableBuilder[T, Array[T]] - with Serializable { - protected[this] var capacity: Int = 0 - protected[this] def elems: Array[T] - protected var size: Int = 0 - - def length: Int = size - - override def knownSize: Int = size - - protected[this] final def ensureSize(size: Int): Unit = { - if (capacity < size || capacity == 0) { - var newsize = if (capacity == 0) 16 else capacity * 2 - while (newsize < size) newsize *= 2 - resize(newsize) - } - } - - override final def sizeHint(size: Int): Unit = - if (capacity < size) resize(size) - - def clear(): Unit = size = 0 - - protected[this] def resize(size: Int): Unit - - /** Add all elements of an array */ - def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) - - /** Add a slice of an array */ - def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { - ensureSize(this.size + length) - Array.copy(xs, offset, elems, this.size, length) - size += length - this - } - - override def addAll(xs: IterableOnce[T]^): this.type = { - val k = xs.knownSize - if (k > 0) { - ensureSize(this.size + k) - val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) - if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") - size += k - } else if (k < 0) super.addAll(xs) - this - } -} - -/** A companion object for array builders. - */ -object ArrayBuilder { - - /** Creates a new arraybuilder of type `T`. - * - * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. - * @return a new empty array builder. 
- */ - @inline def make[T: ClassTag]: ArrayBuilder[T] = { - val tag = implicitly[ClassTag[T]] - tag.runtimeClass match { - case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] - case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] - case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] - case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] - case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] - case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] - case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] - case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] - case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] - case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - } - } - - /** A class for array builders for arrays of reference types. - * - * This builder can be reused. - * - * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
- */ - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { - - protected var elems: Array[T] = _ - - private def mkArray(size: Int): Array[T] = { - if (capacity == size && capacity > 0) elems - else if (elems eq null) new Array[T](size) - else java.util.Arrays.copyOf[T](elems, size) - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: T): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[T] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def clear(): Unit = { - super.clear() - if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) - } - - override def equals(other: Any): Boolean = other match { - case x: ofRef[_] => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofRef" - } - - /** A class for array builders for arrays of `byte`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofByte extends ArrayBuilder[Byte] { - - protected var elems: Array[Byte] = _ - - private def mkArray(size: Int): Array[Byte] = { - val newelems = new Array[Byte](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Byte): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Byte] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofByte => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofByte" - } - - /** A class for array builders for arrays of `short`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofShort extends ArrayBuilder[Short] { - - protected var elems: Array[Short] = _ - - private def mkArray(size: Int): Array[Short] = { - val newelems = new Array[Short](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Short): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Short] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofShort => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofShort" - } - - /** A class for array builders for arrays of `char`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofChar extends ArrayBuilder[Char] { - - protected var elems: Array[Char] = _ - - private def mkArray(size: Int): Array[Char] = { - val newelems = new Array[Char](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Char): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Char] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofChar => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofChar" - } - - /** A class for array builders for arrays of `int`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofInt extends ArrayBuilder[Int] { - - protected var elems: Array[Int] = _ - - private def mkArray(size: Int): Array[Int] = { - val newelems = new Array[Int](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Int): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Int] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofInt => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofInt" - } - - /** A class for array builders for arrays of `long`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofLong extends ArrayBuilder[Long] { - - protected var elems: Array[Long] = _ - - private def mkArray(size: Int): Array[Long] = { - val newelems = new Array[Long](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Long): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Long] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofLong => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofLong" - } - - /** A class for array builders for arrays of `float`s. It can be reused. */ - @SerialVersionUID(3L) - final class ofFloat extends ArrayBuilder[Float] { - - protected var elems: Array[Float] = _ - - private def mkArray(size: Int): Array[Float] = { - val newelems = new Array[Float](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Float): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Float] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofFloat => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofFloat" - } - - /** A class for array builders for arrays of `double`s. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofDouble extends ArrayBuilder[Double] { - - protected var elems: Array[Double] = _ - - private def mkArray(size: Int): Array[Double] = { - val newelems = new Array[Double](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Double): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Double] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofDouble => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofDouble" - } - - /** A class for array builders for arrays of `boolean`s. It can be reused. */ - @SerialVersionUID(3L) - class ofBoolean extends ArrayBuilder[Boolean] { - - protected var elems: Array[Boolean] = _ - - private def mkArray(size: Int): Array[Boolean] = { - val newelems = new Array[Boolean](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems - } - - protected[this] def resize(size: Int): Unit = { - elems = mkArray(size) - capacity = size - } - - def addOne(elem: Boolean): this.type = { - ensureSize(size + 1) - elems(size) = elem - size += 1 - this - } - - def result(): Array[Boolean] = { - if (capacity != 0 && capacity == size) { - capacity = 0 - val res = elems - elems = null - res - } - else mkArray(size) - } - - override def equals(other: Any): Boolean = other match { - case x: ofBoolean => (size == x.size) && (elems == x.elems) - case _ => false - } - - override def toString = "ArrayBuilder.ofBoolean" - } - - /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ - @SerialVersionUID(3L) - final class ofUnit extends ArrayBuilder[Unit] { - - protected def elems: Array[Unit] = throw new UnsupportedOperationException() - - def addOne(elem: Unit): this.type = { - size += 1 - this - } - - override def addAll(xs: IterableOnce[Unit]^): this.type = { - size += xs.iterator.size - this - } - - override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { - size += length - this - } - - def result() = { - val ans = new Array[Unit](size) - var i = 0 - while (i < size) { ans(i) = (); i += 1 } - ans - } - - override def equals(other: Any): Boolean = other match { - case x: ofUnit => (size == x.size) - case _ => false - } - - protected[this] def resize(size: Int): Unit = () - - override def toString = "ArrayBuilder.ofUnit" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala deleted file mode 100644 index f22aacec65c5..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ /dev/null @@ -1,646 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.reflect.ClassTag -import language.experimental.captureChecking - -/** An implementation of a double-ended queue that internally uses a resizable circular buffer. - * - * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) - * take amortized constant time. 
In general, removals and insertions at i-th index are O(min(i, n-i)) - * and thus insertions and removals from end/beginning are fast. - * - * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. - * - * @tparam A the type of this ArrayDeque's elements. - * - * @define Coll `mutable.ArrayDeque` - * @define coll array deque - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class ArrayDeque[sealed A] protected ( - protected var array: Array[AnyRef], - private[ArrayDeque] var start: Int, - private[ArrayDeque] var end: Int -) extends AbstractBuffer[A] - with IndexedBuffer[A] - with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] - with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] - with IterableFactoryDefaults[A, ArrayDeque] - with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] - with Cloneable[ArrayDeque[A]] - with DefaultSerializable { - - reset(array, start, end) - - private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { - assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") - requireBounds(idx = start, until = array.length) - requireBounds(idx = end, until = array.length) - this.array = array - this.start = start - this.end = end - } - - def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - - override def knownSize: Int = super[IndexedSeqOps].knownSize - - // No-Op override to allow for more efficient stepper in a minor release. 
- override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) - - def apply(idx: Int): A = { - requireBounds(idx) - _get(idx) - } - - def update(idx: Int, elem: A): Unit = { - requireBounds(idx) - _set(idx, elem) - } - - def addOne(elem: A): this.type = { - ensureSize(length + 1) - appendAssumingCapacity(elem) - } - - def prepend(elem: A): this.type = { - ensureSize(length + 1) - prependAssumingCapacity(elem) - } - - @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { - array(end) = elem.asInstanceOf[AnyRef] - end = end_+(1) - this - } - - @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { - start = start_-(1) - array(start) = elem.asInstanceOf[AnyRef] - this - } - - override def prependAll(elems: IterableOnce[A]^): this.type = { - val it = elems.iterator - if (it.nonEmpty) { - val n = length - // The following code resizes the current collection at most once and traverses elems at most twice - elems.knownSize match { - // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq - case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) - - // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront - case srcLength if mustGrow(srcLength + n) => - val finalLength = srcLength + n - val array2 = ArrayDeque.alloc(finalLength) - it.copyToArray(array2.asInstanceOf[Array[A]]) - copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - - // Just fill up from (start - srcLength) to (start - 1) and move back start - case srcLength => - // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` - var i = 0 - while(i < srcLength) { - _set(i - srcLength, it.next()) - i += 1 - } - start = 
start_-(srcLength) - } - } - this - } - - override def addAll(elems: IterableOnce[A]^): this.type = { - elems.knownSize match { - case srcLength if srcLength > 0 => - ensureSize(srcLength + length) - elems.iterator.foreach(appendAssumingCapacity) - case _ => elems.iterator.foreach(+=) - } - this - } - - def insert(idx: Int, elem: A): Unit = { - requireBounds(idx, length+1) - val n = length - if (idx == 0) { - prepend(elem) - } else if (idx == n) { - addOne(elem) - } else { - val finalLength = n + 1 - if (mustGrow(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - array2(idx) = elem.asInstanceOf[AnyRef] - copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (n <= idx * 2) { - var i = n - 1 - while(i >= idx) { - _set(i + 1, _get(i)) - i -= 1 - } - end = end_+(1) - i += 1 - _set(i, elem) - } else { - var i = 0 - while(i < idx) { - _set(i - 1, _get(i)) - i += 1 - } - start = start_-(1) - _set(i, elem) - } - } - } - - def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { - requireBounds(idx, length+1) - val n = length - if (idx == 0) { - prependAll(elems) - } else if (idx == n) { - addAll(elems) - } else { - // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) - val (it, srcLength) = { - val _srcLength = elems.knownSize - if (_srcLength >= 0) (elems.iterator, _srcLength) - else { - val indexed = IndexedSeq.from(elems) - (indexed.iterator, indexed.size) - } - } - if (it.nonEmpty) { - val finalLength = srcLength + n - // Either we resize right away or move prefix left or suffix right - if (mustGrow(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - it.copyToArray(array2.asInstanceOf[Array[A]], idx) - copySliceToArray(srcStart = idx, dest = 
array2, destStart = idx + srcLength, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (2*idx >= n) { // Cheaper to shift the suffix right - var i = n - 1 - while(i >= idx) { - _set(i + srcLength, _get(i)) - i -= 1 - } - end = end_+(srcLength) - while(it.hasNext) { - i += 1 - _set(i, it.next()) - } - } else { // Cheaper to shift prefix left - var i = 0 - while(i < idx) { - _set(i - srcLength, _get(i)) - i += 1 - } - start = start_-(srcLength) - while(it.hasNext) { - _set(i, it.next()) - i += 1 - } - } - } - } - } - - def remove(idx: Int, count: Int): Unit = { - if (count > 0) { - requireBounds(idx) - val n = length - val removals = Math.min(n - idx, count) - val finalLength = n - removals - val suffixStart = idx + removals - // If we know we can resize after removing, do it right away using arrayCopy - // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left - if (shouldShrink(finalLength)) { - val array2 = ArrayDeque.alloc(finalLength) - copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) - copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) - reset(array = array2, start = 0, end = finalLength) - } else if (2*idx <= finalLength) { // Cheaper to move the prefix right - var i = suffixStart - 1 - while(i >= removals) { - _set(i, _get(i - removals)) - i -= 1 - } - while(i >= 0) { - _set(i, null.asInstanceOf[A]) - i -= 1 - } - start = start_+(removals) - } else { // Cheaper to move the suffix left - var i = idx - while(i < finalLength) { - _set(i, _get(i + removals)) - i += 1 - } - while(i < n) { - _set(i, null.asInstanceOf[A]) - i += 1 - } - end = end_-(removals) - } - } else { - require(count == 0, s"removing negative number of elements: $count") - } - } - - def remove(idx: Int): A = { - val elem = this(idx) - remove(idx, 1) - elem - } - - override def subtractOne(elem: A): this.type = { - val idx = indexOf(elem) - if (idx 
>= 0) remove(idx, 1) //TODO: SeqOps should be fluent API - this - } - - /** - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @return - */ - def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = - if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) - - /** - * Unsafely remove the first element (throws exception when empty) - * See also removeHeadOption() - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @throws NoSuchElementException when empty - * @return - */ - def removeHead(resizeInternalRepr: Boolean = false): A = - if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) - - @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { - val elem = array(start) - array(start) = null - start = start_+(1) - if (resizeInternalRepr) resize(length) - elem.asInstanceOf[A] - } - - /** - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @return - */ - def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] = - if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr)) - - /** - * Unsafely remove the last element (throws exception when empty) - * See also removeLastOption() - * - * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while - * @throws NoSuchElementException when empty - * @return - */ - def removeLast(resizeInternalRepr: Boolean = false): A = - if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr) - - @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { - end = end_-(1) - val elem = array(end) - array(end) = null - if 
(resizeInternalRepr) resize(length) - elem.asInstanceOf[A] - } - - /** - * Remove all elements from this collection and return the elements while emptying this data structure - * @return - */ - def removeAll(): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - elems.sizeHint(length) - while(nonEmpty) { - elems += removeHeadAssumingNonEmpty() - } - elems.result() - } - - /** - * Remove all elements from this collection and return the elements in reverse while emptying this data structure - * @return - */ - def removeAllReverse(): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - elems.sizeHint(length) - while(nonEmpty) { - elems += removeLastAssumingNonEmpty() - } - elems.result() - } - - /** - * Returns and removes all elements from the left of this queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return - */ - def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - while(headOption.exists(f)) { - elems += removeHeadAssumingNonEmpty() - } - elems.result() - } - - /** - * Returns and removes all elements from the right of this queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return - */ - def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { - val elems = scala.collection.immutable.Seq.newBuilder[A] - while(lastOption.exists(f)) { - elems += removeLastAssumingNonEmpty() - } - elems.result() - } - - /** Returns the first element which satisfies the given predicate after or at some start index - * and removes this element from the collections - * - * @param p the predicate used for choosing the first element - * @param from the start index - * @return the first element of the queue for which p yields true - */ - def removeFirst(p: A => Boolean, from: Int = 0): 
Option[A] = { - val i = indexWhere(p, from) - if (i < 0) None else Some(remove(i)) - } - - /** Returns all elements in this collection which satisfy the given predicate - * and removes those elements from this collections. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = { - val res = scala.collection.immutable.Seq.newBuilder[A] - var i, j = 0 - while (i < size) { - if (p(this(i))) { - res += this(i) - } else { - if (i != j) { - this(j) = this(i) - } - j += 1 - } - i += 1 - } - if (i != j) takeInPlace(j) - res.result() - } - - @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint) - - def length = end_-(start) - - override def isEmpty = start == end - - override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end) - - override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque - - /** - * Note: This does not actually resize the internal representation. 
- * See clearAndShrink if you want to also resize internally - */ - def clear(): Unit = { - while(nonEmpty) { - removeHeadAssumingNonEmpty() - } - } - - /** - * Clears this buffer and shrinks to @param size - * - * @param size - * @return - */ - def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = { - reset(array = ArrayDeque.alloc(size), start = 0, end = 0) - this - } - - protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = - new ArrayDeque[A](array, start = 0, end) - - override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) - if (copied > 0) { - copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) - } - copied - } - - override def toArray[sealed B >: A: ClassTag]: Array[B] = - copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) - - /** - * Trims the capacity of this ArrayDeque's instance to be the current size - */ - def trimToSize(): Unit = resize(length) - - // Utils for common modular arithmetic: - @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1) - @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1) - @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1) - @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1) - - // Note: here be overflow dragons! This is used for int overflow - // assumptions in resize(). Use caution changing. - @inline private[this] def mustGrow(len: Int) = { - len >= array.length - } - - // Assumes that 0 <= len < array.length! - @inline private[this] def shouldShrink(len: Int) = { - // To avoid allocation churn, only shrink when array is large - // and less than 2/5 filled. - array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len - } - - // Assumes that 0 <= len < array.length! 
- @inline private[this] def canShrink(len: Int) = { - array.length > ArrayDeque.DefaultInitialSize && array.length - len > len - } - - @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] - - @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] - - // Assumes that 0 <= len. - private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) { - val n = length - val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n) - reset(array = array2, start = 0, end = n) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "ArrayDeque" -} - -/** - * $factoryInfo - * @define coll array deque - * @define Coll `ArrayDeque` - */ -@SerialVersionUID(3L) -object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - - def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = { - val s = coll.knownSize - if (s >= 0) { - val array = alloc(s) - val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) - if (actual != s) throw new IllegalStateException(s"Copied $actual of $s") - new ArrayDeque[B](array, start = 0, end = s) - } else new ArrayDeque[B]() ++= coll - } - - def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] = - new GrowableBuilder[A, ArrayDeque[A]](empty) { - override def sizeHint(size: Int): Unit = { - elems.ensureSize(size) - } - } - - def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]() - - final val DefaultInitialSize = 16 - - /** - * We try to not repeatedly resize arrays smaller than this - */ - private[ArrayDeque] final val StableSize = 128 - - /** - * Allocates an array whose size is next power of 2 > `len` - * Largest possible len is 1<<30 - 1 - * - * @param len - * @return - */ - private[mutable] def alloc(len: Int) = { - require(len >= 0, s"Non-negative array size required") - val size = (1 << 31) >>> 
java.lang.Integer.numberOfLeadingZeros(len) << 1 - require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") - new Array[AnyRef](Math.max(size, DefaultInitialSize)) - } -} - -trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { - protected def array: Array[AnyRef] - - final override def clone(): C = klone() - - protected def klone(): C - - protected def ofArray(array: Array[AnyRef], end: Int): C - - protected def start_+(idx: Int): Int - - @inline protected final def requireBounds(idx: Int, until: Int = length): Unit = - if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})") - - /** - * This is a more general version of copyToArray - this also accepts a srcStart unlike copyToArray - * This copies maxItems elements from this collections srcStart to dest's destStart - * If we reach the end of either collections before we could copy maxItems, we simply stop copying - * - * @param dest - * @param srcStart - * @param destStart - * @param maxItems - */ - def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = { - requireBounds(destStart, dest.length+1) - val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart)) - if (toCopy > 0) { - requireBounds(srcStart) - val startIdx = start_+(srcStart) - val block1 = Math.min(toCopy, array.length - startIdx) - Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1) - val block2 = toCopy - block1 - if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2) - } - dest - } - - override def reverse: C = { - val n = length - val arr = ArrayDeque.alloc(n) - var i = 0 - while(i < n) { - arr(i) = this(n - i - 1).asInstanceOf[AnyRef] - i += 1 - } - ofArray(arr, n) - } - - override def slice(from: Int, until: Int): C = { - val n = length - val left = Math.max(0, 
Math.min(n, from)) - val right = Math.max(0, Math.min(n, until)) - val len = right - left - if (len <= 0) { - empty - } else if (len >= n) { - klone() - } else { - val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) - ofArray(array2, len) - } - } - - override def sliding(window: Int, step: Int): Iterator[C] = { - require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") - length match { - case 0 => Iterator.empty - case n if n <= window => Iterator.single(slice(0, length)) - case n => - val lag = if (window > step) window - step else 0 - Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) - } - } - - override def grouped(n: Int): Iterator[C] = sliding(n, n) -} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala deleted file mode 100644 index bd3a208a94c0..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ /dev/null @@ -1,351 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import java.util.Arrays - -import scala.collection.Stepper.EfficientSplit -import scala.collection.convert.impl._ -import scala.reflect.ClassTag -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking -import annotation.unchecked.uncheckedCaptures - -/** - * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same - * underlying `Array`, therefore it is not growable or shrinkable. - * - * @tparam T type of the elements in this wrapped array. 
- * - * @define Coll `ArraySeq` - * @define coll wrapped array - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -sealed abstract class ArraySeq[sealed T] - extends AbstractSeq[T] - with IndexedSeq[T] - with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] - with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] - with Serializable - with Pure { - - override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged - - override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { - val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] - val s = coll.knownSize - if(s > 0) b.sizeHint(s) - b ++= coll - ArraySeq.make(b.result()) - } - override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] - override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) - - /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - def elemTag: ClassTag[_] - - /** Update element at given index */ - def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit - - /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive - * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype - * or subtype of the element type. */ - def array: Array[_] - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit - - override protected[this] def className = "ArraySeq" - - /** Clones this object, including the underlying Array. 
*/ - override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]]) - - override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if(copied > 0) { - Array.copy(array, 0, xs, start, copied) - } - copied - } - - override def equals(other: Any): Boolean = other match { - case that: ArraySeq[_] if this.array.length != that.array.length => - false - case _ => - super.equals(other) - } - - override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] - - override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]]) - this - } -} - -/** A companion object used to create instances of `ArraySeq`. - */ -@SerialVersionUID(3L) -object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => - val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) - - // This is reused for all calls to empty. - private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) - def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - - def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it)) - - def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) - - /** - * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type - * without copying. - * - * Note that an array containing boxed primitives can be converted to a `ArraySeq` without - * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, - * containing `Integer`s. 
An `ArraySeq[Int]` can be obtained with a cast: - * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still - * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing - * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` - * at runtime. - */ - def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { - case null => null - case x: Array[AnyRef] => new ofRef[AnyRef](x) - case x: Array[Int] => new ofInt(x) - case x: Array[Double] => new ofDouble(x) - case x: Array[Long] => new ofLong(x) - case x: Array[Float] => new ofFloat(x) - case x: Array[Char] => new ofChar(x) - case x: Array[Byte] => new ofByte(x) - case x: Array[Short] => new ofShort(x) - case x: Array[Boolean] => new ofBoolean(x) - case x: Array[Unit] => new ofUnit(x) - }).asInstanceOf[ArraySeq[T]] - - @SerialVersionUID(3L) - final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { - def elemTag = ClassTag[T](array.getClass.getComponentType) - def length: Int = array.length - def apply(index: Int): T = array(index) - def update(index: Int, elem: T): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofRef[_] => - Array.equals( - this.array.asInstanceOf[Array[AnyRef]], - that.array.asInstanceOf[Array[AnyRef]]) - case _ => super.equals(that) - } - override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - new ObjectArrayStepper(array, 0, array.length) - else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { - def elemTag = ClassTag.Byte 
- def length: Int = array.length - def apply(index: Int): Byte = array(index) - def update(index: Int, elem: Byte): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofByte => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) - else new WidenedByteArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { - def elemTag = ClassTag.Short - def length: Int = array.length - def apply(index: Int): Short = array(index) - def update(index: Int, elem: Short): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofShort => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) - else new WidenedShortArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { - def elemTag = ClassTag.Char - def length: Int = array.length - def apply(index: Int): Char = array(index) - def update(index: Int, elem: Char): Unit = { array(index) = elem } - override def hashCode = 
MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofChar => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) - else new WidenedCharArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { - val jsb = sb.underlying - if (start.length != 0) jsb.append(start) - val len = array.length - if (len != 0) { - if (sep.isEmpty) jsb.append(array) - else { - jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) - jsb.append(array(0)) - var i = 1 - while (i < len) { - jsb.append(sep) - jsb.append(array(i)) - i += 1 - } - } - } - if (end.length != 0) jsb.append(end) - sb - } - } - - @SerialVersionUID(3L) - final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { - def elemTag = ClassTag.Int - def length: Int = array.length - def apply(index: Int): Int = array(index) - def update(index: Int, elem: Int): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofInt => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) - else new IntArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class 
ofLong(val array: Array[Long]) extends ArraySeq[Long] { - def elemTag = ClassTag.Long - def length: Int = array.length - def apply(index: Int): Long = array(index) - def update(index: Int, elem: Long): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofLong => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) - else new LongArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { - def elemTag = ClassTag.Float - def length: Int = array.length - def apply(index: Int): Float = array(index) - def update(index: Int, elem: Float): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofFloat => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) - else new WidenedFloatArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { - def elemTag = ClassTag.Double - def length: Int = array.length - def apply(index: Int): Double = array(index) - def update(index: Int, elem: Double): 
Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofDouble => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( - if(shape.shape == StepperShape.ReferenceShape) - AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) - else new DoubleArrayStepper(array, 0, array.length) - ).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { - def elemTag = ClassTag.Boolean - def length: Int = array.length - def apply(index: Int): Boolean = array(index) - def update(index: Int, elem: Boolean): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofBoolean => Arrays.equals(array, that.array) - case _ => super.equals(that) - } - override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = - new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] - } - - @SerialVersionUID(3L) - final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { - def elemTag = ClassTag.Unit - def length: Int = array.length - def apply(index: Int): Unit = array(index) - def update(index: Int, elem: Unit): Unit = { array(index) = elem } - override def hashCode = MurmurHash3.arraySeqHash(array) - override def equals(that: Any) = that match { - case that: ofUnit => array.length == that.array.length - case _ => super.equals(that) - } - override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) - override def 
stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = - new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala deleted file mode 100644 index dcb8a157389b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/BitSet.scala +++ /dev/null @@ -1,393 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.collection.immutable.Range -import BitSetOps.{LogWL, MaxSize} -import scala.annotation.implicitNotFound -import language.experimental.captureChecking - -/** - * A class for mutable bitsets. - * - * $bitsetinfo - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] - * section on `Mutable Bitsets` for more information. 
- * - * @define Coll `BitSet` - * @define coll bitset - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class BitSet(protected[collection] final var elems: Array[Long]) - extends AbstractSet[Int] - with SortedSet[Int] - with SortedSetOps[Int, SortedSet, BitSet] - with StrictOptimizedIterableOps[Int, Set, BitSet] - with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] - with collection.BitSet - with collection.BitSetOps[BitSet] - with Serializable { - - def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) - - def this() = this(0) - - override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) - override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder - override def empty: BitSet = bitSetFactory.empty - - def bitSetFactory = BitSet - - override def unsorted: Set[Int] = this - - protected[collection] final def nwords: Int = elems.length - - protected[collection] final def word(idx: Int): Long = - if (idx < nwords) elems(idx) else 0L - - protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = - if (elems.length == 0) empty - else new BitSet(elems) - - def addOne(elem: Int): this.type = { - require(elem >= 0) - if (!contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) | (1L << elem)) - } - this - } - - def subtractOne(elem: Int): this.type = { - require(elem >= 0) - if (contains(elem)) { - val idx = elem >> LogWL - updateWord(idx, word(idx) & ~(1L << elem)) - } - this - } - - def clear(): Unit = { - elems = new Array[Long](elems.length) - } - - protected final def updateWord(idx: Int, w: Long): Unit = { - ensureCapacity(idx) - elems(idx) = w - } - - protected final def ensureCapacity(idx: Int): Unit = { - require(idx < MaxSize) - if (idx >= nwords) { - var newlen = nwords - while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) - val elems1 = 
new Array[Long](newlen) - Array.copy(elems, 0, elems1, 0, nwords) - elems = elems1 - } - } - - def unconstrained: collection.Set[Int] = this - - /** Updates this bitset to the union with another bitset by performing a bitwise "or". - * - * @param other the bitset to form the union with. - * @return the bitset itself. - */ - def |= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - elems(i) = elems(i) | other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". - * - * @param other the bitset to form the intersection with. - * @return the bitset itself. - */ - def &= (other: collection.BitSet): this.type = { - // Different from other operations: no need to ensure capacity because - // anything beyond the capacity is 0. Since we use other.word which is 0 - // off the end, we also don't need to make sure we stay in bounds there. - var i = 0 - val thisnwords = nwords - while (i < thisnwords) { - elems(i) = elems(i) & other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". - * - * @param other the bitset to form the symmetric difference with. - * @return the bitset itself. - */ - def ^= (other: collection.BitSet): this.type = { - ensureCapacity(other.nwords - 1) - var i = 0 - val othernwords = other.nwords - while (i < othernwords) { - - elems(i) = elems(i) ^ other.word(i) - i += 1 - } - this - } - /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". - * - * @param other the bitset to form the difference with. - * @return the bitset itself. 
- */ - def &~= (other: collection.BitSet): this.type = { - var i = 0 - val max = Math.min(nwords, other.nwords) - while (i < max) { - elems(i) = elems(i) & ~other.word(i) - i += 1 - } - this - } - - override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) - - def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) - - override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) - override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].map(f) - - override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) - override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].flatMap(f) - - override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) - override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = - super[StrictOptimizedSortedSetOps].collect(pf) - - // necessary for disambiguation - override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = - super.zip(that) - - override def addAll(xs: IterableOnce[Int]^): this.type = xs match { - case bs: collection.BitSet => - this |= bs - case range: Range => - if (range.nonEmpty) { - val start = range.min - if (start >= 0) { - val end = range.max - val endIdx = end >> LogWL - ensureCapacity(endIdx) - - if (range.step == 1 || range.step == -1) { - val startIdx = start >> LogWL - val wordStart = startIdx * BitSetOps.WordLength - val wordMask = -1L << (start - wordStart) - - if (endIdx > startIdx) { - elems(startIdx) |= wordMask - java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) - 
elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) - } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) - } else super.addAll(range) - } else super.addAll(range) - } - this - - case sorted: collection.SortedSet[Int] => - // if `sorted` is using the regular Int ordering, ensure capacity for the largest - // element up front to avoid multiple resizing allocations - if (sorted.nonEmpty) { - val ord = sorted.ordering - if (ord eq Ordering.Int) { - ensureCapacity(sorted.lastKey >> LogWL) - } else if (ord eq Ordering.Int.reverse) { - ensureCapacity(sorted.firstKey >> LogWL) - } - val iter = sorted.iterator - while (iter.hasNext) { - addOne(iter.next()) - } - } - - this - - case other => - super.addAll(other) - } - - override def subsetOf(that: collection.Set[Int]): Boolean = that match { - case bs: collection.BitSet => - val thisnwords = this.nwords - val bsnwords = bs.nwords - val minWords = Math.min(thisnwords, bsnwords) - - // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there - var i = bsnwords - while (i < thisnwords) { - if (word(i) != 0L) return false - i += 1 - } - - // the higher range of `this` is all `0`s, fall back to lower range - var j = 0 - while (j < minWords) { - if ((word(j) & ~bs.word(j)) != 0L) return false - j += 1 - } - - true - case other => - super.subsetOf(other) - } - - override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match { - case bs: collection.BitSet => this &~= bs - case other => super.subtractAll(other) - } - - protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) - - override def diff(that: collection.Set[Int]): BitSet = that match { - case bs: collection.BitSet => - /* - * Algorithm: - * - * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with - * the fewer words. 
- * - * Array Shrinking: - * If `this` is not longer than `bs`, then since we must iterate through the full array of words, - * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new - * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` - */ - - val bsnwords = bs.nwords - val thisnwords = nwords - if (bsnwords >= thisnwords) { - // here, we may have opportunity to shrink the size of the array - // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length - var i = thisnwords - 1 - var currentWord = 0L - - while (i >= 0 && currentWord == 0L) { - val oldWord = word(i) - currentWord = oldWord & ~bs.word(i) - i -= 1 - } - - if (i < 0) { - fromBitMaskNoCopy(Array(currentWord)) - } else { - val minimumNonZeroIndex: Int = i + 1 - val newArray = elems.take(minimumNonZeroIndex + 1) - newArray(i + 1) = currentWord - while (i >= 0) { - newArray(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newArray) - } - } else { - // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index - val newElems = elems.clone() - var i = bsnwords - 1 - while (i >= 0) { - newElems(i) = word(i) & ~bs.word(i) - i -= 1 - } - fromBitMaskNoCopy(newElems) - } - case _ => super.diff(that) - } - - override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { - // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word - // index which lets us avoid: - // * over-allocating -- the resulting array will be exactly the right size - // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
- var i = nwords - 1 - var newArray: Array[Long] = null - while (i >= 0) { - val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) - if (w != 0L) { - if (newArray eq null) { - newArray = new Array(i + 1) - } - newArray(i) = w - } - i -= 1 - } - if (newArray eq null) { - empty - } else { - fromBitMaskNoCopy(newArray) - } - } - - override def filterInPlace(p: Int => Boolean): this.type = { - val thisnwords = nwords - var i = 0 - while (i < thisnwords) { - elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) - i += 1 - } - this - } - - override def toBitMask: Array[Long] = elems.clone() -} - -@SerialVersionUID(3L) -object BitSet extends SpecificIterableFactory[Int, BitSet] { - - def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it) - - def empty: BitSet = new BitSet() - - def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) - - /** A bitset containing all the bits in an array */ - def fromBitMask(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else { - val a = java.util.Arrays.copyOf(elems, len) - new BitSet(a) - } - } - - /** A bitset containing all the bits in an array, wrapping the existing - * array without copying. 
- */ - def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { - val len = elems.length - if (len == 0) empty - else new BitSet(elems) - } - - @SerialVersionUID(3L) - private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { - protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 0f472dc9ac82..0a70c75bac0c 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -15,11 +15,10 @@ package mutable import scala.annotation.nowarn import language.experimental.captureChecking -import scala.annotation.unchecked.uncheckedCaptures /** A `Buffer` is a growable and shrinkable `Seq`. */ -trait Buffer[sealed A] +trait Buffer[A] extends Seq[A] with SeqOps[A, Buffer, Buffer[A]] with Growable[A] @@ -186,7 +185,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s) + val newElems = new Array[IterableOnce[A]^](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala deleted file mode 100644 index 152b6cc9ffc7..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable -import language.experimental.captureChecking - -private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { - this: CheckedIndexedSeqView[A]^ => - - protected val mutationCount: () => Int - - override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): 
IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) -} - -private[mutable] object CheckedIndexedSeqView { - import IndexedSeqView.SomeIndexedSeqOps - - @SerialVersionUID(3L) - private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) - extends IndexedSeqView.IndexedSeqViewIterator[A](self) { - private[this] val expectedCount = mutationCount - override def hasNext: Boolean = { - MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) - super.hasNext - } - } - - @SerialVersionUID(3L) - private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) - extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { - private[this] val expectedCount = mutationCount - override def hasNext: Boolean = { - MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) - super.hasNext - } - } - - @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) - extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Take[A](underlying: 
SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) - extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] - - @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) - extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] - - @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) - extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { - override def reverse: IndexedSeqView[A] = underlying match { - case x: IndexedSeqView[A] => x - case _ => super.reverse - } - } - - @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) - extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { - protected val lo = from max 0 - protected val hi = (until max 0) min underlying.length - protected val len = (hi - lo) max 0 - @throws[IndexOutOfBoundsException] - def apply(i: Int): A = underlying(lo + i) - def length: Int = len - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala deleted file mode 100644 index 39149e98cbf0..000000000000 --- 
a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.mutable -import language.experimental.captureChecking - -/** A trait for cloneable collections. - * - * @tparam C Type of the collection, covariant and with reference types as upperbound. - */ -trait Cloneable[+C <: AnyRef] extends scala.Cloneable { - override def clone(): C = super.clone().asInstanceOf[C] -} diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala deleted file mode 100644 index 2b27efb6eac1..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala +++ /dev/null @@ -1,889 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.{unchecked => uc} -import scala.annotation.{implicitNotFound, tailrec, unused} -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic.DefaultSerializationProxy -import scala.runtime.Statics -import language.experimental.captureChecking - -/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good - * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality - * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. 
Universal equality - * of numeric types is not supported (similar to `AnyRefMap`). - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @define Coll `mutable.CollisionProofHashMap` - * @define coll mutable collision-proof hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) - extends AbstractMap[K, V] - with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- - with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] - with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- - - private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap - - def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) - - import CollisionProofHashMap.Node - private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] - private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] - - /** The actual hash table. */ - private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). 
*/ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - @`inline` private[this] final def computeHash(o: K): Int = { - val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode - h ^ (h >>> 16) - } - - @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) - - override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) - override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] - - override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - - override def contains(key: K): Boolean = findNode(key) ne null - - def get(key: K): Option[V] = findNode(key) match { - case null => None - case nd => Some(nd match { - case nd: LLNode @uc => nd.value - case nd: RBNode @uc => nd.value - }) - } - - @throws[NoSuchElementException] - override def apply(key: K): V = findNode(key) match { - case null => default(key) - case nd => nd match { - case nd: LLNode @uc => nd.value - case nd: RBNode @uc => nd.value - } - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - val nd = findNode(key) - if (nd eq null) default else nd match { - case nd: LLNode @uc => nd.value - case n => n.asInstanceOf[RBNode].value - } - } - - @`inline` private[this] def findNode(elem: K): Node = { - val hash = computeHash(elem) - table(index(hash)) match { - case null => null - case n: LLNode @uc => n.getNode(elem, hash) - case n => n.asInstanceOf[RBNode].getNode(elem, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) { - if(size == 0) reallocTable(target) - else growTable(target) - } - } - - override def update(key: K, value: V): Unit = put0(key, 
value, false) - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } - - @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - val res = table(idx) match { - case n: RBNode @uc => - insert(n, idx, key, hash, value) - case _old => - val old: LLNode = _old.asInstanceOf[LLNode] - if(old eq null) { - table(idx) = new LLNode(key, hash, value, null) - } else { - var remaining = CollisionProofHashMap.treeifyThreshold - var prev: LLNode = null - var n = old - while((n ne null) && n.hash <= hash && remaining > 0) { - if(n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return (if(getOld) Some(old) else null) - } - prev = n - n = n.next - remaining -= 1 - } - if(remaining == 0) { - treeify(old, idx) - return put0(key, value, getOld, hash, idx) - } - if(prev eq null) table(idx) = new LLNode(key, hash, value, old) - else prev.next = new LLNode(key, hash, value, prev.next) - } - true - } - if(res) contentSize += 1 - if(res) Some(null.asInstanceOf[V]) else null //TODO - } - - private[this] def treeify(old: LLNode, idx: Int): Unit = { - table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) - var n: LLNode = old.next - while(n ne null) { - val root = table(idx).asInstanceOf[RBNode] - insertIntoExisting(root, idx, n.key, n.hash, n.value, root) - n = n.next - } - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - val k = xs.knownSize - if(k > 0) sizeHint(contentSize + k) - super.addAll(xs) - } - - // returns the old value or Statics.pfMarker if not found - private[this] def 
remove0(elem: K) : Any = { - val hash = computeHash(elem) - val idx = index(hash) - table(idx) match { - case null => Statics.pfMarker - case t: RBNode @uc => - val v = delete(t, idx, elem, hash) - if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 - v - case nd: LLNode @uc if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - nd.value - case nd: LLNode @uc => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return next.value - } - prev = next - next = next.next - } - Statics.pfMarker - } - } - - private[this] abstract class MapIterator[R] extends AbstractIterator[R] { - protected[this] def extract(node: LLNode): R - protected[this] def extract(node: RBNode): R - - private[this] var i = 0 - private[this] var node: Node = null - private[this] val len = table.length - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - n match { - case null => - case n: RBNode @uc => - node = CollisionProofHashMap.minNodeNonNull(n) - return true - case n: LLNode @uc => - node = n - return true - } - } - false - } - } - - def next(): R = - if(!hasNext) Iterator.empty.next() - else node match { - case n: RBNode @uc => - val r = extract(n) - node = CollisionProofHashMap.successor(n ) - r - case n: LLNode @uc => - val r = extract(n) - node = n.next - r - } - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else new MapIterator[K] { - protected[this] def extract(node: LLNode) = node.key - protected[this] def extract(node: RBNode) = node.key - } - } - - override def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else new MapIterator[(K, V)] { - protected[this] def extract(node: LLNode) = (node.key, node.value) - protected[this] def extract(node: RBNode) = 
(node.key, node.value) - } - } - - private[this] def growTable(newlen: Int) = { - var oldlen = table.length - table = java.util.Arrays.copyOf(table, newlen) - threshold = newThreshold(table.length) - while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) splitBucket(old, i, i + oldlen, oldlen) - i += 1 - } - oldlen *= 2 - } - } - - @`inline` private[this] def reallocTable(newlen: Int) = { - table = new Array(newlen) - threshold = newThreshold(table.length) - } - - @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { - case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) - case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) - } - - private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { - val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - //preLow.next = null - //preHigh.next = null - var lastLow: LLNode = preLow - var lastHigh: LLNode = preHigh - var n = list - while(n ne null) { - val next = n.next - if((n.hash & mask) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(list ne preLow.next) table(lowBucket) = preLow.next - if(preHigh.next ne null) { - table(highBucket) = preHigh.next - lastHigh.next = null - } - } - - private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { - var lowCount, highCount = 0 - tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) - if(highCount != 0) { - if(lowCount == 0) { - table(lowBucket) = null - table(highBucket) = tree - } else { - table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), 
lowCount) - table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) - } - } - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - override def remove(key: K): Option[V] = { - val v = remove0(key) - if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) - } - - def subtractOne(elem: K): this.type = { remove0(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: ((K, V)) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n match { - case n: LLNode @uc => n.foreach(f) - case n: RBNode @uc => n.foreach(f) - } - i += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n match { - case n: LLNode @uc => n.foreachEntry(f) - case n: RBNode @uc => n.foreachEntry(f) - } - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) - - override protected[this] def className = "CollisionProofHashMap" - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - val hash = computeHash(key) - val idx = index(hash) - table(idx) match { - case null => () - case n: LLNode @uc => - val nd = n.getNode(key, hash) - if(nd != null) return nd.value - case n => - val nd = n.asInstanceOf[RBNode].getNode(key, hash) - if(nd != null) return nd.value - } - val table0 = table - val default = defaultValue - if(contentSize + 1 >= threshold) growTable(table.length * 
2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. - val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - - ///////////////////// Overrides code from SortedMapOps - - /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given function - * `f` to each element of this $coll and collecting the results. - */ - def map[K2, V2](f: ((K, V)) => (K2, V2)) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) - - /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll - * and using the elements of the resulting collections. - * - * @param f the function to apply to each element. - * @return a new $coll resulting from applying the given collection-valued function - * `f` to each element of this $coll and concatenating the results. - */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.FlatMap(this, f)) - - /** Builds a new sorted map by applying a partial function to all elements of this $coll - * on which the function is defined. - * - * @param pf the partial function which filters and maps the $coll. - * @return a new $coll resulting from applying the given partial function - * `pf` to each element on which it is defined and collecting the results. - * The order of the elements is preserved. 
- */ - def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) - (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Collect(this, pf)) - - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(this, it) - case _ => iterator.concat(suffix.iterator) - }) - - /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Appended(this, kv)) - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) - - ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: - - @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red - @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red - - @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { - val i = hash - node.hash - if(i != 0) i else ordering.compare(key, node.key) - } - - @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { - /*val i = hash - node.hash - if(i != 0) i else*/ ordering.compare(key, node.key) - } - - // ---- insertion ---- - - @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { - val cmp = compare(key, hash, x) - if(cmp == 0) { - x.value = value - false - } else { - val next 
= if(cmp < 0) x.left else x.right - if(next eq null) { - val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) - if (cmp < 0) x.left = z else x.right = z - table(bucket) = fixAfterInsert(_root, z) - return true - } - else insertIntoExisting(_root, bucket, key, hash, value, next) - } - } - - private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { - if(tree eq null) { - table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) - true - } else insertIntoExisting(tree, bucket, key, hash, value, tree) - } - - private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { - var root = _root - var z = node - while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = z.parent.parent.right - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.right) { - z = z.parent - root = rotateLeft(root, z) - } - z.parent.red = false - z.parent.parent.red = true - root = rotateRight(root, z.parent.parent) - } - } else { // symmetric cases - val y = z.parent.parent.left - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.left) { - z = z.parent - root = rotateRight(root, z) - } - z.parent.red = false - z.parent.parent.red = true - root = rotateLeft(root, z.parent.parent) - } - } - } - root.red = false - root - } - - // ---- deletion ---- - - // returns the old value or Statics.pfMarker if not found - private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { - var root = _root - val z = root.getNode(key, hash: Int) - if (z ne null) { - val oldValue = z.value - var y = z - var yIsRed = y.red - var x: RBNode = null - var xParent: RBNode = null - - if (z.left eq null) { - x = z.right - root = transplant(root, z, z.right) - xParent = z.parent - } - else if (z.right eq null) { - x = z.left - 
root = transplant(root, z, z.left) - xParent = z.parent - } - else { - y = CollisionProofHashMap.minNodeNonNull(z.right) - yIsRed = y.red - x = y.right - - if (y.parent eq z) xParent = y - else { - xParent = y.parent - root = transplant(root, y, y.right) - y.right = z.right - y.right.parent = y - } - root = transplant(root, z, y) - y.left = z.left - y.left.parent = y - y.red = z.red - } - - if (!yIsRed) root = fixAfterDelete(root, x, xParent) - if(root ne _root) table(bucket) = root - oldValue - } else Statics.pfMarker - } - - private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { - var root = _root - var x = node - var xParent = parent - while ((x ne root) && isBlack(x)) { - if (x eq xParent.left) { - var w = xParent.right - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - root = rotateLeft(root, xParent) - w = xParent.right - } - if (isBlack(w.left) && isBlack(w.right)) { - w.red = true - x = xParent - } else { - if (isBlack(w.right)) { - w.left.red = false - w.red = true - root = rotateRight(root, w) - w = xParent.right - } - w.red = xParent.red - xParent.red = false - w.right.red = false - root = rotateLeft(root, xParent) - x = root - } - } else { // symmetric cases - var w = xParent.left - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - root = rotateRight(root, xParent) - w = xParent.left - } - if (isBlack(w.right) && isBlack(w.left)) { - w.red = true - x = xParent - } else { - if (isBlack(w.left)) { - w.right.red = false - w.red = true - root = rotateLeft(root, w) - w = xParent.left - } - w.red = xParent.red - xParent.red = false - w.left.red = false - root = rotateRight(root, xParent) - x = root - } - } - xParent = x.parent - } - if (x ne null) x.red = false - root - } - - // ---- helpers ---- - - @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { - var root = _root - val y = x.right - x.right = y.left - - val xp = x.parent - if (y.left ne 
null) y.left.parent = x - y.parent = xp - - if (xp eq null) root = y - else if (x eq xp.left) xp.left = y - else xp.right = y - - y.left = x - x.parent = y - root - } - - @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { - var root = _root - val y = x.left - x.left = y.right - - val xp = x.parent - if (y.right ne null) y.right.parent = x - y.parent = xp - - if (xp eq null) root = y - else if (x eq xp.right) xp.right = y - else xp.left = y - - y.right = x - x.parent = y - root - } - - /** - * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous - * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. - */ - private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { - var root = _root - if (to.parent eq null) root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from - if (from ne null) from.parent = to.parent - root - } - - // building - - def fromNodes(xs: Iterator[Node], size: Int): RBNode = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): RBNode = size match { - case 0 => null - case 1 => - val nn = xs.next() - val (key, hash, value) = nn match { - case nn: LLNode @uc => (nn.key, nn.hash, nn.value) - case nn: RBNode @uc => (nn.key, nn.hash, nn.value) - } - new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val nn = xs.next() - val right = f(level+1, size-1-leftSize) - val (key, hash, value) = nn match { - case nn: LLNode @uc => (nn.key, nn.hash, nn.value) - case nn: RBNode @uc => (nn.key, nn.hash, nn.value) - } - val n = new RBNode(key, hash, value, false, left, right, null) - if(left ne null) left.parent = n - right.parent = n - n - } - f(1, size) - } -} - -/** - * 
$factoryInfo - * @define Coll `mutable.CollisionProofHashMap` - * @define coll mutable collision-proof hash map - */ -@SerialVersionUID(3L) -object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { - private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." - - def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it - } - - def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - - def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = - new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { - override def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it - def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = 
CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) - } - - @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { - val i = hash - node.hash - if(i != 0) i else ord.compare(key, node.key) - } - - @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { - /*val i = hash - node.hash - if(i != 0) i else*/ ord.compare(key, node.key) - } - - private final val treeifyThreshold = 8 - - // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. - // Keeping calls monomorphic where possible and dispatching manually where needed is faster. - sealed abstract class Node - - /////////////////////////// Red-Black Tree Node - - final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { - override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" - - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { - val cmp = compare(k, h, this) - if (cmp < 0) { - if(left ne null) left.getNode(k, h) else null - } else if (cmp > 0) { - if(right ne null) right.getNode(k, h) else null - } else this - } - - def foreach[U](f: ((K, V)) => U): Unit = { - if(left ne null) left.foreach(f) - f((key, value)) - if(right ne null) right.foreach(f) - } - - def foreachEntry[U](f: (K, V) => U): Unit = { - if(left ne null) left.foreachEntry(f) - f(key, value) - if(right ne null) right.foreachEntry(f) - } - - def foreachNode[U](f: RBNode[K, V] => U): Unit = { - if(left ne null) left.foreachNode(f) - f(this) - if(right ne null) right.foreachNode(f) - } - } - - @`inline` private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = - new RBNode(key, 
hash, value, red, null, null, parent) - - @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) - - /** - * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, - * therefore, the last node), this method returns `null`. - */ - private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = { - if (node.right ne null) minNodeNonNull(node.right) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.right)) { - x = y - y = y.parent - } - y - } - } - - private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { - private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) - - def hasNext: Boolean = nextNode ne null - - @throws[NoSuchElementException] - def next(): RBNode[A, B] = nextNode match { - case null => Iterator.empty.next() - case node => - nextNode = successor(node) - node - } - } - - /////////////////////////// Linked List Node - - private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { - override def toString = s"LLNode($key, $value, $hash) -> $next" - - private[this] def eq(a: Any, b: Any): Boolean = - if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) - - @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { - if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this - else if((next eq null) || (hash > h)) null - else next.getNode(k, h) - } - - @tailrec def foreach[U](f: ((K, V)) => U): Unit = { - f((key, value)) - if(next ne null) next.foreach(f) - } - - @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { - f(key, value) - if(next ne null) next.foreachEntry(f) - } - - @tailrec def 
foreachNode[U](f: LLNode[K, V] => U): Unit = { - f(this) - if(next ne null) next.foreachNode(f) - } - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala deleted file mode 100644 index 4d6f989e6f3d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable -import language.experimental.captureChecking - -/** The canonical builder for collections that are growable, i.e. that support an - * efficient `+=` method which adds an element to the collection. - * - * GrowableBuilders can produce only a single instance of the collection they are growing. - * - * @define Coll `GrowingBuilder` - * @define coll growing builder - */ -class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) - extends Builder[Elem, To] { - - def clear(): Unit = elems.clear() - - def result(): To = elems - - def addOne(elem: Elem): this.type = { elems += elem; this } - - override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this } - - override def knownSize: Int = elems.knownSize -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala deleted file mode 100644 index ab45e7ffc73d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashMap.scala +++ /dev/null @@ -1,655 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializationProxy -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable maps using a hashtable. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @tparam K the type of the keys contained in this hash map. - * @tparam V the type of the values assigned to keys in this hash map. - * - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") -class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double) - extends AbstractMap[K, V] - with MapOps[K, V, HashMap, HashMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] - with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] - with MapFactoryDefaults[K, V, HashMap, Iterable] - with Serializable { - - /* The HashMap class holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. - * - Every bucket is sorted in ascendent hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) - - import HashMap.Node - - /** The actual hash table. 
*/ - private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). */ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of an original (`any.##`) hash. */ - @`inline` private[this] def improveHash(originalHash: Int): Int = { - // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the - // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement - // algorithm as in java.util.HashMap. - // - // This function is also its own inverse. That is, for all ints i, improveHash(improveHash(i)) = i - // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap - // and that is why unimproveHash simply forwards to this method - originalHash ^ (originalHash >>> 16) - } - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - override def contains(key: K): Boolean = findNode(key) ne null - - @`inline` private[this] def findNode(key: K): Node[K, V] = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findNode(key, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) growTable(target) - } - - override def addAll(xs: IterableOnce[(K, V)]^): this.type = { - sizeHint(xs.knownSize) - - xs match { - case hm: 
immutable.HashMap[K, V] => - hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) - this - case hm: mutable.HashMap[K, V] => - val iter = hm.nodeIterator - while (iter.hasNext) { - val next = iter.next() - put0(next.key, next.value, next.hash, getOld = false) - } - this - case lhm: mutable.LinkedHashMap[K, V] => - val iter = lhm.entryIterator - while (iter.hasNext) { - val entry = iter.next() - put0(entry.key, entry.value, entry.hash, getOld = false) - } - this - case thatMap: Map[K, V] => - thatMap.foreachEntry { (key: K, value: V) => - put0(key, value, improveHash(key.##), getOld = false) - } - this - case _ => - super.addAll(xs) - } - } - - // Override updateWith for performance, so we can do the update while hashing - // the input key only once and performing one lookup into the hash table - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... 
- super.updateWith(key)(remappingFunction) - } else { - val hash = computeHash(key) - val indexedHash = index(hash) - - var foundNode: Node[K, V] = null - var previousNode: Node[K, V] = null - table(indexedHash) match { - case null => - case nd => - @tailrec - def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { - if (h == nd.hash && k == nd.key) { - previousNode = prev - foundNode = nd - } - else if ((nd.next eq null) || (nd.hash > h)) () - else findNode(nd, nd.next, k, h) - } - - findNode(null, nd, key, hash) - } - - val previousValue = foundNode match { - case null => None - case nd => Some(nd.value) - } - - val nextValue = remappingFunction(previousValue) - - (previousValue, nextValue) match { - case (None, None) => // do nothing - - case (Some(_), None) => - if (previousNode != null) previousNode.next = foundNode.next - else table(indexedHash) = foundNode.next - contentSize -= 1 - - case (None, Some(value)) => - val newIndexedHash = - if (contentSize + 1 >= threshold) { - growTable(table.length * 2) - index(hash) - } else indexedHash - put0(key, value, false, hash, newIndexedHash) - - case (Some(_), Some(newValue)) => foundNode.value = newValue - } - nextValue - } - } - - override def subtractAll(xs: IterableOnce[K]^): this.type = { - if (size == 0) { - return this - } - - xs match { - case hs: immutable.HashSet[K] => - hs.foreachWithHashWhile { (k, h) => - remove0(k, improveHash(h)) - size > 0 - } - this - case hs: mutable.HashSet[K] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - remove0(next.key, next.hash) - if (size == 0) return this - } - this - case lhs: mutable.LinkedHashSet[K] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - remove0(next.key, next.hash) - if (size == 0) return this - } - this - case _ => super.subtractAll(xs) - } - } - - /** Adds a key-value pair to this map - * - * @param key the key to add - * @param value the value to add - * @param hash the 
**improved** hashcode of `key` (see computeHash) - * @param getOld if true, then the previous value for `key` will be returned, otherwise, false - */ - private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - table(idx) match { - case null => - table(idx) = new Node[K, V](key, hash, value, null) - case old => - var prev: Node[K, V] = null - var n = old - while((n ne null) && n.hash <= hash) { - if(n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return if(getOld) Some(old) else null - } - prev = n - n = n.next - } - if(prev eq null) table(idx) = new Node(key, hash, value, old) - else prev.next = new Node(key, hash, value, prev.next) - } - contentSize += 1 - null - } - - private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) - - /** Removes a key from this map if it exists - * - * @param elem the element to remove - * @param hash the **improved** hashcode of `element` (see computeHash) - * @return the node that contained element if it was present, otherwise null - */ - private[this] def remove0(elem: K, hash: Int) : Node[K, V] = { - val idx = index(hash) - table(idx) match { - case null => null - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - nd - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return 
next - } - prev = next - next = next.next - } - null - } - } - - private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { - private[this] var i = 0 - private[this] var node: Node[K, V] = null - private[this] val len = table.length - - protected[this] def extract(nd: Node[K, V]): A - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - if(n ne null) { node = n; return true } - } - false - } - } - - def next(): A = - if(!hasNext) Iterator.empty.next() - else { - val r = extract(node) - node = node.next - r - } - } - - override def iterator: Iterator[(K, V)] = - if(size == 0) Iterator.empty - else new HashMapIterator[(K, V)] { - protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) - } - - override def keysIterator: Iterator[K] = - if(size == 0) Iterator.empty - else new HashMapIterator[K] { - protected[this] def extract(nd: Node[K, V]) = nd.key - } - - override def valuesIterator: Iterator[V] = - if(size == 0) Iterator.empty - else new HashMapIterator[V] { - protected[this] def extract(nd: Node[K, V]) = nd.value - } - - - /** Returns an iterator over the nodes stored in this HashMap */ - private[collection] def nodeIterator: Iterator[Node[K, V]] = - if(size == 0) Iterator.empty - else new HashMapIterator[Node[K, V]] { - protected[this] def extract(nd: Node[K, V]) = nd - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape. - parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). 
- asInstanceOf[S with EfficientSplit] - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - private[this] def growTable(newlen: Int) = { - if (newlen < 0) - throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") - var oldlen = table.length - threshold = newThreshold(newlen) - if(size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) - // Split buckets until the new 
length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. - while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) { - preLow.next = null - preHigh.next = null - var lastLow: Node[K, V] = preLow - var lastHigh: Node[K, V] = preHigh - var n = old - while(n ne null) { - val next = n.next - if((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(old ne preLow.next) table(i) = preLow.next - if(preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - def get(key: K): Option[V] = findNode(key) match { - case null => None - case nd => Some(nd.value) - } - - @throws[NoSuchElementException] - override def apply(key: K): V = findNode(key) match { - case null => default(key) - case nd => nd.value - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... - super.getOrElse(key, default) - } else { - // .. but in the common case, we can avoid the Option boxing. - val nd = findNode(key) - if (nd eq null) default else nd.value - } - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - if (getClass != classOf[HashMap[_, _]]) { - // subclasses of HashMap might customise `get` ... 
- super.getOrElseUpdate(key, defaultValue) - } else { - val hash = computeHash(key) - val idx = index(hash) - val nd = table(idx) match { - case null => null - case nd => nd.findNode(key, hash) - } - if(nd != null) nd.value - else { - val table0 = table - val default = defaultValue - if(contentSize + 1 >= threshold) growTable(table.length * 2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. - val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - } - } - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - override def remove(key: K): Option[V] = remove0(key) match { - case null => None - case nd => Some(nd.value) - } - - override def update(key: K, value: V): Unit = put0(key, value, false) - - def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } - - def subtractOne(elem: K): this.type = { remove0(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: ((K, V)) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreach(f) - i += 1 - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreachEntry(f) - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) - - override def filterInPlace(p: (K, V) => Boolean): this.type = { - if (nonEmpty) { - var bucket = 0 - - while (bucket < table.length) { - var head = table(bucket) - - while ((head ne null) && !p(head.key, head.value)) { - head = head.next - contentSize -= 1 - } - - if (head ne null) { - var prev = head - var next = head.next - - while (next ne null) { - 
if (p(next.key, next.value)) { - prev = next - } else { - prev.next = next.next - contentSize -= 1 - } - next = next.next - } - } - - table(bucket) = head - bucket += 1 - } - } - this - } - - // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) - private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { - val len = table.length - var i = 0 - while (i < len) { - var n = table(i) - while (n ne null) { - n.value = f(n.key, n.value) - n = n.next - } - i += 1 - } - this - } - - override def mapFactory: MapFactory[HashMap] = HashMap - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "HashMap" - - override def hashCode: Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - val tupleHashIterator = new HashMapIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override protected[this] def extract(nd: Node[K, V]): Any = { - hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) - this - } - } - MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) - } - } -} - -/** - * $factoryInfo - * @define Coll `mutable.HashMap` - * @define coll mutable hash map - */ -@SerialVersionUID(3L) -object HashMap extends MapFactory[HashMap] { - - def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new HashMap[K, V](cap, defaultLoadFactor).addAll(it) - } - - def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = - new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { - override 
def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) - def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) - } - - private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { - def key: K = _key - def hash: Int = _hash - def value: V = _value - def value_= (v: V): Unit = _value = v - def next: Node[K, V] = _next - def next_= (n: Node[K, V]): Unit = _next = n - - @tailrec - def findNode(k: K, h: Int): Node[K, V] = - if(h == _hash && k == _key) this - else if((_next eq null) || (_hash > h)) null - else _next.findNode(k, h) - - @tailrec - def foreach[U](f: ((K, V)) => U): Unit = { - f((_key, _value)) - if(_next ne null) _next.foreach(f) - } - - @tailrec - def foreachEntry[U](f: (K, V) => U): Unit = { - f(_key, _value) - if(_next ne null) _next.foreachEntry(f) - } - - override def toString = s"Node($key, $value, $hash) -> $next" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala deleted file mode 100644 index e8c055ff15ef..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashSet.scala +++ /dev/null @@ -1,457 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializationProxy -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable sets using a hashtable. - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] - * section on `Hash Tables` for more information. - * - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double) - extends AbstractSet[A] - with SetOps[A, HashSet, HashSet[A]] - with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] - with IterableFactoryDefaults[A, HashSet] - with Serializable { - - def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) - - import HashSet.Node - - /* The Hashset class holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. - * - Every bucket is sorted in ascendent hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - /** The actual hash table. */ - private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) - - /** The next size value at which to resize (capacity * load factor). */ - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def size: Int = contentSize - - /** Performs the inverse operation of improveHash. 
In this case, it happens to be identical to improveHash*/ - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of an original (`any.##`) hash. */ - private[this] def improveHash(originalHash: Int): Int = { - // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the - // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement - // algorithm as in java.util.HashMap. - originalHash ^ (originalHash >>> 16) - } - - /** Computes the improved hash of this element */ - @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - override def contains(elem: A): Boolean = findNode(elem) ne null - - @`inline` private[this] def findNode(elem: A): Node[A] = { - val hash = computeHash(elem) - table(index(hash)) match { - case null => null - case nd => nd.findNode(elem, hash) - } - } - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) - if(target > table.length) growTable(target) - } - - override def add(elem: A) : Boolean = { - if(contentSize + 1 >= threshold) growTable(table.length * 2) - addElem(elem, computeHash(elem)) - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - sizeHint(xs.knownSize) - xs match { - case hs: immutable.HashSet[A] => - hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) - this - case hs: mutable.HashSet[A] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - addElem(next.key, next.hash) - } - this - case lhs: mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - addElem(next.key, next.hash) - } - this - case _ => super.addAll(xs) - } - } - - override def subtractAll(xs: IterableOnce[A]^): this.type = { - if (size 
== 0) { - return this - } - - xs match { - case hs: immutable.HashSet[A] => - hs.foreachWithHashWhile { (k, h) => - remove(k, improveHash(h)) - size > 0 - } - this - case hs: mutable.HashSet[A] => - val iter = hs.nodeIterator - while (iter.hasNext) { - val next = iter.next() - remove(next.key, next.hash) - if (size == 0) return this - } - this - case lhs: mutable.LinkedHashSet[A] => - val iter = lhs.entryIterator - while (iter.hasNext) { - val next = iter.next() - remove(next.key, next.hash) - if (size == 0) return this - } - this - case _ => super.subtractAll(xs) - } - } - - /** Adds an element to this set - * @param elem element to add - * @param hash the **improved** hash of `elem` (see computeHash) - */ - private[this] def addElem(elem: A, hash: Int) : Boolean = { - val idx = index(hash) - table(idx) match { - case null => - table(idx) = new Node(elem, hash, null) - case old => - var prev: Node[A] = null - var n = old - while((n ne null) && n.hash <= hash) { - if(n.hash == hash && elem == n.key) return false - prev = n - n = n.next - } - if(prev eq null) - table(idx) = new Node(elem, hash, old) - else - prev.next = new Node(elem, hash, prev.next) - } - contentSize += 1 - true - } - - private[this] def remove(elem: A, hash: Int): Boolean = { - val idx = index(hash) - table(idx) match { - case null => false - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - contentSize -= 1 - true - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while((next ne null) && next.hash <= hash) { - if(next.hash == hash && next.key == elem) { - prev.next = next.next - contentSize -= 1 - return true - } - prev = next - next = next.next - } - false - } - } - - override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) - - private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { - private[this] var i = 0 - private[this] var node: Node[A] = null - private[this] val len = 
table.length - - protected[this] def extract(nd: Node[A]): B - - def hasNext: Boolean = { - if(node ne null) true - else { - while(i < len) { - val n = table(i) - i += 1 - if(n ne null) { node = n; return true } - } - false - } - } - - def next(): B = - if(!hasNext) Iterator.empty.next() - else { - val r = extract(node) - node = node.next - r - } - } - - override def iterator: Iterator[A] = new HashSetIterator[A] { - override protected[this] def extract(nd: Node[A]): A = nd.key - } - - /** Returns an iterator over the nodes stored in this HashSet */ - private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { - override protected[this] def extract(nd: Node[A]): Node[A] = nd - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import convert.impl._ - val s = shape.shape match { - case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) - case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) - case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) - case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) - } - s.asInstanceOf[S with EfficientSplit] - } - - private[this] def growTable(newlen: Int) = { - var oldlen = table.length - threshold = newThreshold(newlen) - if(size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) - val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. 
- while(oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if(old ne null) { - preLow.next = null - preHigh.next = null - var lastLow: Node[A] = preLow - var lastHigh: Node[A] = preHigh - var n = old - while(n ne null) { - val next = n.next - if((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if(old ne preLow.next) table(i) = preLow.next - if(preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def filterInPlace(p: A => Boolean): this.type = { - if (nonEmpty) { - var bucket = 0 - - while (bucket < table.length) { - var head = table(bucket) - - while ((head ne null) && !p(head.key)) { - head = head.next - contentSize -= 1 - } - - if (head ne null) { - var prev = head - var next = head.next - - while (next ne null) { - if (p(next.key)) { - prev = next - } else { - prev.next = next.next - contentSize -= 1 - } - next = next.next - } - } - - table(bucket) = head - bucket += 1 - } - } - this - } - - /* - private[mutable] def checkTable(): Unit = { - var i = 0 - var count = 0 - var prev: Node[A] = null - while(i < table.length) { - var n = table(i) - prev = null - while(n != null) { - count += 1 - assert(index(n.hash) == i) - if(prev ne null) assert(prev.hash <= n.hash) - prev = n - n = n.next - } - i += 1 - } - assert(contentSize == count) - } - */ - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt - - def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - } - - override def iterableFactory: IterableFactory[HashSet] = HashSet - - @`inline` def addOne(elem: A): this.type = { add(elem); this } - - @`inline` def subtractOne(elem: A): 
this.type = { remove(elem); this } - - override def knownSize: Int = size - - override def isEmpty: Boolean = size == 0 - - override def foreach[U](f: A => U): Unit = { - val len = table.length - var i = 0 - while(i < len) { - val n = table(i) - if(n ne null) n.foreach(f) - i += 1 - } - } - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) - - override protected[this] def className = "HashSet" - - override def hashCode: Int = { - val setIterator = this.iterator - val hashIterator: Iterator[Any] = - if (setIterator.isEmpty) setIterator - else new HashSetIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override protected[this] def extract(nd: Node[A]): Any = { - hash = unimproveHash(nd.hash) - this - } - } - MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) - } -} - -/** - * $factoryInfo - * @define Coll `mutable.HashSet` - * @define coll mutable hash set - */ -@SerialVersionUID(3L) -object HashSet extends IterableFactory[HashSet] { - - def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { - val k = it.knownSize - val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity - new HashSet[B](cap, defaultLoadFactor) ++= it - } - - def empty[sealed A]: HashSet[A] = new HashSet[A] - - def newBuilder[sealed A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - - def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = - new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { - override def sizeHint(size: Int) = elems.sizeHint(size) - } - - /** The default load factor for the hash table */ - final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - final def defaultInitialCapacity: Int = 16 - - @SerialVersionUID(3L) - private final class 
DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it - def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) - } - - private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { - def key: K = _key - def hash: Int = _hash - def next: Node[K] = _next - def next_= (n: Node[K]): Unit = _next = n - - @tailrec - def findNode(k: K, h: Int): Node[K] = - if(h == _hash && k == _key) this - else if((_next eq null) || (_hash > h)) null - else _next.findNode(k, h) - - @tailrec - def foreach[U](f: K => U): Unit = { - f(_key) - if(_next ne null) _next.foreach(f) - } - - override def toString = s"Node($key, $hash) -> $next" - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala deleted file mode 100644 index a3534e322cf3..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/HashTable.scala +++ /dev/null @@ -1,418 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import collection.{AbstractIterator, Iterator} - -import java.lang.Integer.{numberOfLeadingZeros, rotateRight} -import scala.util.hashing.byteswap32 - -import java.lang.Integer -import language.experimental.captureChecking - -/** This class can be used to construct data structures that are based - * on hashtables. Class `HashTable[A]` implements a hashtable - * that maps keys of type `A` to values of the fully abstract - * member type `Entry`. 
Classes that make use of `HashTable` - * have to provide an implementation for `Entry`. - * - * There are mainly two parameters that affect the performance of a hashtable: - * the initial size and the load factor. The size - * refers to the number of buckets in the hashtable, and the load - * factor is a measure of how full the hashtable is allowed to get before - * its size is automatically doubled. Both parameters may be changed by - * overriding the corresponding values in class `HashTable`. - * - * @tparam A type of the elements contained in this hash table. - */ -// Not used in the standard library, but used in scala-parallel-collections -private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { - // Replacing Entry type parameter by abstract type member here allows to not expose to public - // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. - // However, I'm afraid it's too late now for such breaking change. - import HashTable._ - - protected var _loadFactor = defaultLoadFactor - - /** The actual hash table. - */ - protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) - - /** The number of mappings contained in this hash table. - */ - protected[collection] var tableSize: Int = 0 - - final def size: Int = tableSize - - /** The next size value at which to resize (capacity * load factor). - */ - protected[collection] var threshold: Int = initialThreshold(_loadFactor) - - /** The array keeping track of the number of elements in 32 element blocks. - */ - protected var sizemap: Array[Int] = null - - protected var seedvalue: Int = tableSizeSeed - - protected def tableSizeSeed = Integer.bitCount(table.length - 1) - - /** The initial size of the hash table. - */ - protected def initialSize: Int = 16 - - /** The initial threshold. 
- */ - private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) - - private def initialCapacity = capacity(initialSize) - - private def lastPopulatedIndex = { - var idx = table.length - 1 - while (table(idx) == null && idx > 0) - idx -= 1 - - idx - } - - /** - * Initializes the collection from the input stream. `readEntry` will be called for each - * entry to be read from the input stream. - */ - private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { - _loadFactor = in.readInt() - assert(_loadFactor > 0) - - val size = in.readInt() - tableSize = 0 - assert(size >= 0) - - seedvalue = in.readInt() - - val smDefined = in.readBoolean() - - table = new Array(capacity(sizeForThreshold(_loadFactor, size))) - threshold = newThreshold(_loadFactor, table.length) - - if (smDefined) sizeMapInit(table.length) else sizemap = null - - var index = 0 - while (index < size) { - addEntry(readEntry) - index += 1 - } - } - - /** - * Serializes the collection to the output stream by saving the load factor, collection - * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. - * - * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To - * deserialize, `init` should be used. - */ - private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { - out.writeInt(_loadFactor) - out.writeInt(tableSize) - out.writeInt(seedvalue) - out.writeBoolean(isSizeMapDefined) - - foreachEntry(writeEntry) - } - - /** Find entry with given key in table, null if not found. 
- */ - final def findEntry(key: A): Entry = - findEntry0(key, index(elemHashCode(key))) - - protected[collection] final def findEntry0(key: A, h: Int): Entry = { - var e = table(h).asInstanceOf[Entry] - while (e != null && !elemEquals(e.key, key)) e = e.next - e - } - - /** Add entry to table - * pre: no entry with same key exists - */ - protected[collection] final def addEntry(e: Entry): Unit = { - addEntry0(e, index(elemHashCode(e.key))) - } - - protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { - e.next = table(h).asInstanceOf[Entry] - table(h) = e - tableSize = tableSize + 1 - nnSizeMapAdd(h) - if (tableSize > threshold) - resize(2 * table.length) - } - - /** Find entry with given key in table, or add new one if not found. - * May be somewhat faster then `findEntry`/`addEntry` pair as it - * computes entry's hash index only once. - * Returns entry found in table or null. - * New entries are created by calling `createNewEntry` method. - */ - def findOrAddEntry(key: A, value: B): Entry = { - val h = index(elemHashCode(key)) - val e = findEntry0(key, h) - if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } - } - - /** Creates new entry to be immediately inserted into the hashtable. - * This method is guaranteed to be called only once and in case that the entry - * will be added. In other words, an implementation may be side-effecting. - */ - def createNewEntry(key: A, value: B): Entry - - /** Remove entry from table if present. - */ - final def removeEntry(key: A) : Entry = { - removeEntry0(key, index(elemHashCode(key))) - } - /** Remove entry from table if present. 
- */ - private[collection] final def removeEntry0(key: A, h: Int) : Entry = { - var e = table(h).asInstanceOf[Entry] - if (e != null) { - if (elemEquals(e.key, key)) { - table(h) = e.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e.next = null - return e - } else { - var e1 = e.next - while (e1 != null && !elemEquals(e1.key, key)) { - e = e1 - e1 = e1.next - } - if (e1 != null) { - e.next = e1.next - tableSize = tableSize - 1 - nnSizeMapRemove(h) - e1.next = null - return e1 - } - } - } - null - } - - /** An iterator returning all entries. - */ - def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - def hasNext = es != null - def next() = { - val res = es - es = es.next - while (es == null && idx > 0) { - idx = idx - 1 - es = iterTable(idx) - } - res.asInstanceOf[Entry] - } - } - - /** Avoid iterator for a 2x faster traversal. */ - def foreachEntry[U](f: Entry => U): Unit = { - val iterTable = table - var idx = lastPopulatedIndex - var es = iterTable(idx) - - while (es != null) { - val next = es.next // Cache next in case f removes es. 
- f(es.asInstanceOf[Entry]) - es = next - - while (es == null && idx > 0) { - idx -= 1 - es = iterTable(idx) - } - } - } - - /** Remove all entries from table - */ - def clearTable(): Unit = { - var i = table.length - 1 - while (i >= 0) { table(i) = null; i = i - 1 } - tableSize = 0 - nnSizeMapReset(0) - } - - private def resize(newSize: Int): Unit = { - val oldTable = table - table = new Array(newSize) - nnSizeMapReset(table.length) - var i = oldTable.length - 1 - while (i >= 0) { - var e = oldTable(i) - while (e != null) { - val h = index(elemHashCode(e.key)) - val e1 = e.next - e.next = table(h).asInstanceOf[Entry] - table(h) = e - e = e1 - nnSizeMapAdd(h) - } - i = i - 1 - } - threshold = newThreshold(_loadFactor, newSize) - } - - /* Size map handling code */ - - /* - * The following three sizeMap* functions (Add, Remove, Reset) - * are used to update the size map of the hash table. - * - * The size map logically divides the hash table into `sizeMapBucketSize` element buckets - * by keeping an integer entry for each such bucket. Each integer entry simply denotes - * the number of elements in the corresponding bucket. - * Best understood through an example, see: - * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) - * sizemap = [ 2 | 3 ] (2 entries) - * where sizeMapBucketSize == 4. - * - * By default the size map is not initialized, so these methods don't do anything, thus, - * their impact on hash table performance is negligible. However, if the hash table - * is converted into a parallel hash table, the size map is initialized, as it will be needed - * there. 
- */ - protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) += 1 - } - - protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { - sizemap(h >> sizeMapBucketBitSize) -= 1 - } - - protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { - val nsize = calcSizeMapSize(tableLength) - if (sizemap.length != nsize) sizemap = new Array[Int](nsize) - else java.util.Arrays.fill(sizemap, 0) - } - - private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize - - protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 - - // discards the previous sizemap and only allocates a new one - protected def sizeMapInit(tableLength: Int): Unit = { - sizemap = new Array[Int](calcSizeMapSize(tableLength)) - } - - // discards the previous sizemap and populates the new one - protected final def sizeMapInitAndRebuild() = { - sizeMapInit(table.length) - - // go through the buckets, count elements - var tableidx = 0 - var bucketidx = 0 - val tbl = table - var tableuntil = 0 - if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize - val totalbuckets = totalSizeMapBuckets - while (bucketidx < totalbuckets) { - var currbucketsize = 0 - while (tableidx < tableuntil) { - var e = tbl(tableidx) - while (e ne null) { - currbucketsize += 1 - e = e.next - } - tableidx += 1 - } - sizemap(bucketidx) = currbucketsize - tableuntil += sizeMapBucketSize - bucketidx += 1 - } - } - - private[collection] def printSizeMap() = { - println(sizemap.to(collection.immutable.List)) - } - - protected final def sizeMapDisable() = sizemap = null - - protected final def isSizeMapDefined = sizemap ne null - - // override to automatically initialize the size map - protected def alwaysInitSizeMap = false - - /* End of size map handling code */ - - protected def elemEquals(key1: A, key2: A): 
Boolean = (key1 == key2) - - /** - * Note: we take the most significant bits of the hashcode, not the lower ones - * this is of crucial importance when populating the table in parallel - */ - protected[collection] final def index(hcode: Int): Int = { - val ones = table.length - 1 - val exponent = Integer.numberOfLeadingZeros(ones) - (improve(hcode, seedvalue) >>> exponent) & ones - } -} - -private[collection] object HashTable { - /** The load factor for the hash table (in 0.001 step). - */ - private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% - private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible - - private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt - - private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt - - private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) - - trait HashUtils[KeyType] { - protected final def sizeMapBucketBitSize = 5 - // so that: - protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize - - protected[collection] def elemHashCode(key: KeyType) = key.## - - /** - * Defer to a high-quality hash in [[scala.util.hashing]]. - * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. - *

- * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 - * {{{ - * var h: Int = hcode + ~(hcode << 9) - * h = h ^ (h >>> 14) - * h = h + (h << 4) - * h ^ (h >>> 10) - * }}} - * the rest of the computation is due to SI-5293 - */ - protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) - } - - /** - * Returns a power of two >= `target`. - */ - private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) -} - -/** Class used internally. - */ -private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] { - val key: A - var next: E = _ -} diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala deleted file mode 100644 index 1af98162e9f3..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable -import language.experimental.captureChecking - - -/** - * Reusable builder for immutable collections - */ -abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) - extends ReusableBuilder[A, C] { - - protected var elems: C = empty - - def clear(): Unit = { elems = empty } - - def result(): C = elems - - override def knownSize: Int = elems.knownSize -} diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala deleted file mode 100644 index 022970b4c56f..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable -import language.experimental.captureChecking - -trait IndexedSeq[T] extends Seq[T] - with scala.collection.IndexedSeq[T] - with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] - with IterableFactoryDefaults[T, IndexedSeq] { - - override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq -} - -@SerialVersionUID(3L) -object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) - -trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] - extends scala.collection.IndexedSeqOps[A, CC, C] - with SeqOps[A, CC, C] { - - /** Modifies this $coll by applying a function to all elements of this $coll. - * - * @param f the function to apply to each element. - * @return this $coll modified by replacing all elements with the - * result of applying the given function `f` to each element - * of this $coll. 
- */ - def mapInPlace(f: A => A): this.type = { - var i = 0 - val siz = size - while (i < siz) { this(i) = f(this(i)); i += 1 } - this - } - - /** Sorts this $coll in place according to an Ordering. - * - * @see [[scala.collection.SeqOps.sorted]] - * @param ord the ordering to be used to compare elements. - * @return modified input $coll sorted according to the ordering `ord`. - */ - def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - val len = this.length - if (len > 1) { - val arr = new Array[AnyRef](len) - var i = 0 - for (x <- this) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) - i = 0 - while (i < arr.length) { - update(i, arr(i).asInstanceOf[A]) - i += 1 - } - } - this - } - - /** Sorts this $coll in place according to a comparison function. - * - * @see [[scala.collection.SeqOps.sortWith]] - */ - def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) - - /** Sorts this $coll in place according to the Ordering which results from transforming - * an implicitly given Ordering with a transformation function. - * - * @see [[scala.collection.SeqOps.sortBy]] - */ - def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala deleted file mode 100644 index a253e8738b26..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala +++ /dev/null @@ -1,510 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.generic.DefaultSerializable -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - - -/** This class implements mutable maps using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam K the type of the keys contained in this hash map. - * @tparam V the type of the values assigned to keys in this hash map. - * - * @define Coll `LinkedHashMap` - * @define coll linked hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") -class LinkedHashMap[sealed K, sealed V] - extends AbstractMap[K, V] - with SeqMap[K, V] - with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] - with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] - with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap - - // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper - // would not return the elements in insertion order - - private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] - - private[collection] def _firstEntry: Entry = firstEntry - - protected var firstEntry: Entry = null - - protected var lastEntry: Entry = null - - /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. 
- * - Every bucket is sorted in ascendant hash order - * - The sum of the lengths of all buckets is equal to contentSize. - */ - private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) - - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def last: (K, V) = - if (size > 0) (lastEntry.key, lastEntry.value) - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") - - override def lastOption: Option[(K, V)] = - if (size > 0) Some((lastEntry.key, lastEntry.value)) - else None - - override def head: (K, V) = - if (size > 0) (firstEntry.key, firstEntry.value) - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") - - override def headOption: Option[(K, V)] = - if (size > 0) Some((firstEntry.key, firstEntry.value)) - else None - - override def size = contentSize - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - - def get(key: K): Option[V] = { - val e = findEntry(key) - if (e == null) None - else Some(e.value) - } - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) - if (target > table.length) growTable(target) - } - - override def contains(key: K): Boolean = { - if (getClass eq classOf[LinkedHashMap[_, _]]) - findEntry(key) != null - else - super.contains(key) // A subclass might override `get`, use the default implementation `contains`. 
- } - - override def put(key: K, value: V): Option[V] = put0(key, value, true) match { - case null => None - case sm => sm - } - - override def update(key: K, value: V): Unit = put0(key, value, false) - - override def remove(key: K): Option[V] = removeEntry0(key) match { - case null => None - case nd => Some(nd.value) - } - - override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { - if (getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... - super.getOrElse(key, default) - } else { - // .. but in the common case, we can avoid the Option boxing. - val nd = findEntry(key) - if (nd eq null) default else nd.value - } - } - - override def getOrElseUpdate(key: K, defaultValue: => V): V = { - if (getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... - super.getOrElseUpdate(key, defaultValue) - } else { - val hash = computeHash(key) - val idx = index(hash) - val nd = table(idx) match { - case null => null - case nd => nd.findEntry(key, hash) - } - if (nd != null) nd.value - else { - val table0 = table - val default = defaultValue - if (contentSize + 1 >= threshold) growTable(table.length * 2) - // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. 
- val newIdx = if (table0 eq table) idx else index(hash) - put0(key, default, false, hash, newIdx) - default - } - } - } - - private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) - - /** Removes a key from this map if it exists - * - * @param elem the element to remove - * @param hash the **improved** hashcode of `element` (see computeHash) - * @return the node that contained element if it was present, otherwise null - */ - private[this] def removeEntry0(elem: K, hash: Int): Entry = { - val idx = index(hash) - table(idx) match { - case null => null - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - deleteEntry(nd) - contentSize -= 1 - nd - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while ((next ne null) && next.hash <= hash) { - if (next.hash == hash && next.key == elem) { - prev.next = next.next - deleteEntry(next) - contentSize -= 1 - return next - } - prev = next - next = next.next - } - null - } - } - - /** Computes the improved hash of an original (`any.##`) hash. 
*/ - @`inline` private[this] def improveHash(originalHash: Int): Int = { - originalHash ^ (originalHash >>> 16) - } - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - @`inline` private[this] def findEntry(key: K): Entry = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findEntry(key, hash) - } - } - - def addOne(kv: (K, V)): this.type = { - put(kv._1, kv._2) - this - } - - def subtractOne(key: K): this.type = { - remove(key) - this - } - - private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { - private[this] var cur = firstEntry - def extract(nd: Entry): T - def hasNext: Boolean = cur ne null - def next(): T = - if (hasNext) { val r = extract(cur); cur = cur.later; r } - else Iterator.empty.next() - } - - def iterator: Iterator[(K, V)] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[(K, V)] { - def extract(nd: Entry): (K, V) = (nd.key, nd.value) - } - - protected class LinkedKeySet extends KeySet { - override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet - } - - override def keySet: collection.Set[K] = new LinkedKeySet - - override def keysIterator: Iterator[K] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[K] { - def extract(nd: Entry): K = nd.key - } - - private[collection] def entryIterator: Iterator[Entry] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[Entry] { - def extract(nd: Entry): Entry = nd - } - - - // Override updateWith for performance, so we can do the update while hashing - // the input key only once and performing one lookup into the hash table - override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - if 
(getClass != classOf[LinkedHashMap[_, _]]) { - // subclasses of LinkedHashMap might customise `get` ... - super.updateWith(key)(remappingFunction) - } else { - val hash = computeHash(key) - val indexedHash = index(hash) - - var foundEntry: Entry = null - var previousEntry: Entry = null - table(indexedHash) match { - case null => - case nd => - @tailrec - def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { - if (h == nd.hash && k == nd.key) { - previousEntry = prev - foundEntry = nd - } - else if ((nd.next eq null) || (nd.hash > h)) () - else findEntry(nd, nd.next, k, h) - } - - findEntry(null, nd, key, hash) - } - - val previousValue = foundEntry match { - case null => None - case nd => Some(nd.value) - } - - val nextValue = remappingFunction(previousValue) - - (previousValue, nextValue) match { - case (None, None) => // do nothing - - case (Some(_), None) => - if (previousEntry != null) previousEntry.next = foundEntry.next - else table(indexedHash) = foundEntry.next - deleteEntry(foundEntry) - contentSize -= 1 - - case (None, Some(value)) => - val newIndexedHash = - if (contentSize + 1 >= threshold) { - growTable(table.length * 2) - index(hash) - } else indexedHash - put0(key, value, false, hash, newIndexedHash) - - case (Some(_), Some(newValue)) => foundEntry.value = newValue - } - nextValue - } - } - - override def valuesIterator: Iterator[V] = - if (size == 0) Iterator.empty - else new LinkedHashMapIterator[V] { - def extract(nd: Entry): V = nd.value - } - - - override def foreach[U](f: ((K, V)) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f((cur.key, cur.value)) - cur = cur.later - } - } - - override def foreachEntry[U](f: (K, V) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key, cur.value) - cur = cur.later - } - } - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - firstEntry = null - lastEntry = null - } - - private[this] def 
tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt - - /*create a new entry. If table is empty(firstEntry is null), then the - * new entry will be the firstEntry. If not, just set the new entry to - * be the lastEntry. - * */ - private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { - val e = new Entry(key, hash, value) - if (firstEntry eq null) firstEntry = e - else { - lastEntry.later = e - e.earlier = lastEntry - } - lastEntry = e - e - } - - /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ - private[this] def deleteEntry(e: Entry): Unit = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null - e.later = null - e.next = null - } - - private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { - if (contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(key) - val idx = index(hash) - put0(key, value, getOld, hash, idx) - } - - private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { - table(idx) match { - case null => - table(idx) = createNewEntry(key, hash, value) - case old => - var prev: Entry = null - var n = old - while ((n ne null) && n.hash <= hash) { - if (n.hash == hash && key == n.key) { - val old = n.value - n.value = value - return if (getOld) Some(old) else null - } - prev = n - n = n.next - } - val nnode = createNewEntry(key, hash, value) - if (prev eq null) { - nnode.next = old - table(idx) = nnode - } else { - nnode.next = prev.next - prev.next = nnode - } - } - contentSize += 1 - null - } - - private[this] def growTable(newlen: Int): Unit = { - if (newlen < 0) - throw new RuntimeException(s"new hash table size $newlen exceeds maximum") - var 
oldlen = table.length - threshold = newThreshold(newlen) - if (size == 0) table = new Array(newlen) - else { - table = java.util.Arrays.copyOf(table, newlen) - val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) - val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. - while (oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if (old ne null) { - preLow.next = null - preHigh.next = null - var lastLow = preLow - var lastHigh = preHigh - var n = old - while (n ne null) { - val next = n.next - if ((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if (old ne preLow.next) table(i) = preLow.next - if (preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def hashCode: Int = { - if (isEmpty) MurmurHash3.emptyMapHash - else { - val tupleHashIterator = new LinkedHashMapIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override def extract(nd: Entry): Any = { - hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) - this - } - } - MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) - } - } - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "LinkedHashMap" -} - -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -@SerialVersionUID(3L) -object LinkedHashMap extends MapFactory[LinkedHashMap] { - - def empty[sealed K, sealed V] = new LinkedHashMap[K, V] - - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = { - val newlhm = empty[K, V] - newlhm.sizeHint(it.knownSize) 
- newlhm.addAll(it) - newlhm - } - - def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V]) - - /** Class for the linked hash map entry, used internally. - */ - private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) { - var earlier: LinkedEntry[K, V] = null - var later: LinkedEntry[K, V] = null - var next: LinkedEntry[K, V] = null - - @tailrec - final def findEntry(k: K, h: Int): LinkedEntry[K, V] = - if (h == hash && k == key) this - else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) - } - - /** The default load factor for the hash table */ - private[collection] final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - private[collection] final def defaultinitialSize: Int = 16 -} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala deleted file mode 100644 index a895034a852c..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala +++ /dev/null @@ -1,349 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.{nowarn, tailrec} -import scala.collection.generic.DefaultSerializable -import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking - -/** This class implements mutable sets using a hashtable. - * The iterator and all traversal methods of this class visit elements in the order they were inserted. - * - * @tparam A the type of the elements contained in this set. 
- * - * @define Coll `LinkedHashSet` - * @define coll linked hash set - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") -class LinkedHashSet[sealed A] - extends AbstractSet[A] - with SetOps[A, LinkedHashSet, LinkedHashSet[A]] - with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] - with IterableFactoryDefaults[A, LinkedHashSet] - with DefaultSerializable { - - override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet - - // stepper is not overridden to use XTableStepper because that stepper would not return the - // elements in insertion order - - /*private*/ type Entry = LinkedHashSet.Entry[A] - - protected var firstEntry: Entry = null - - protected var lastEntry: Entry = null - - /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: - * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. - * - Every bucket is sorted in ascendant hash order - * - The sum of the lengths of all buckets is equal to contentSize. 
- */ - private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) - - private[this] var threshold: Int = newThreshold(table.length) - - private[this] var contentSize = 0 - - override def last: A = - if (size > 0) lastEntry.key - else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") - - override def lastOption: Option[A] = - if (size > 0) Some(lastEntry.key) - else None - - override def head: A = - if (size > 0) firstEntry.key - else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") - - override def headOption: Option[A] = - if (size > 0) Some(firstEntry.key) - else None - - override def size: Int = contentSize - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - - def contains(elem: A): Boolean = findEntry(elem) ne null - - override def sizeHint(size: Int): Unit = { - val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) - if (target > table.length) growTable(target) - } - - override def add(elem: A): Boolean = { - if (contentSize + 1 >= threshold) growTable(table.length * 2) - val hash = computeHash(elem) - put0(elem, hash, index(hash)) - } - - def addOne(elem: A): this.type = { - add(elem) - this - } - - def subtractOne(elem: A): this.type = { - remove(elem) - this - } - - override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) - - private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { - private[this] var cur = firstEntry - def extract(nd: Entry): T - def hasNext: Boolean = cur ne null - def next(): T = - if (hasNext) { val r = extract(cur); cur = cur.later; r } - else Iterator.empty.next() - } - - def iterator: Iterator[A] = new LinkedHashSetIterator[A] { - override def extract(nd: Entry): A = nd.key - } - - private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { - override def extract(nd: Entry): Entry = nd - } - - override def 
foreach[U](f: A => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key) - cur = cur.later - } - } - - override def clear(): Unit = { - java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) - contentSize = 0 - firstEntry = null - lastEntry = null - } - - private[this] def tableSizeFor(capacity: Int) = - (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) - - private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt - - @`inline` private[this] def improveHash(originalHash: Int): Int = { - originalHash ^ (originalHash >>> 16) - } - - @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) - - /** Computes the improved hash of this key */ - @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) - - @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) - - @`inline` private[this] def findEntry(key: A): Entry = { - val hash = computeHash(key) - table(index(hash)) match { - case null => null - case nd => nd.findEntry(key, hash) - } - } - - /*create a new entry. If table is empty(firstEntry is null), then the - * new entry will be the firstEntry. If not, just set the new entry to - * be the lastEntry. 
- * */ - private[this] def createNewEntry(key: A, hash: Int): Entry = { - val e = new Entry(key, hash) - if (firstEntry eq null) firstEntry = e - else { - lastEntry.later = e - e.earlier = lastEntry - } - lastEntry = e - e - } - - /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ - private[this] def deleteEntry(e: Entry): Unit = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null - e.later = null - e.next = null - } - - private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { - table(idx) match { - case null => - table(idx) = createNewEntry(elem, hash) - case old => - var prev: Entry = null - var n = old - while ((n ne null) && n.hash <= hash) { - if (n.hash == hash && elem == n.key) return false - prev = n - n = n.next - } - val nnode = createNewEntry(elem, hash) - if (prev eq null) { - nnode.next = old - table(idx) = nnode - } else { - nnode.next = prev.next - prev.next = nnode - } - } - contentSize += 1 - true - } - - private[this] def remove0(elem: A, hash: Int): Boolean = { - val idx = index(hash) - table(idx) match { - case null => false - case nd if nd.hash == hash && nd.key == elem => - // first element matches - table(idx) = nd.next - deleteEntry(nd) - contentSize -= 1 - true - case nd => - // find an element that matches - var prev = nd - var next = nd.next - while ((next ne null) && next.hash <= hash) { - if (next.hash == hash && next.key == elem) { - prev.next = next.next - deleteEntry(next) - contentSize -= 1 - return true - } - prev = next - next = next.next - } - false - } - } - - private[this] def growTable(newlen: Int): Unit = { - if (newlen < 0) - throw new RuntimeException(s"new hash table size $newlen exceeds maximum") - var oldlen = table.length - threshold = newThreshold(newlen) - if (size == 0) table = new Array(newlen) - else { - table = 
java.util.Arrays.copyOf(table, newlen) - val preLow = new Entry(null.asInstanceOf[A], 0) - val preHigh = new Entry(null.asInstanceOf[A], 0) - // Split buckets until the new length has been reached. This could be done more - // efficiently when growing an already filled table to more than double the size. - while (oldlen < newlen) { - var i = 0 - while (i < oldlen) { - val old = table(i) - if (old ne null) { - preLow.next = null - preHigh.next = null - var lastLow = preLow - var lastHigh = preHigh - var n = old - while (n ne null) { - val next = n.next - if ((n.hash & oldlen) == 0) { // keep low - lastLow.next = n - lastLow = n - } else { // move to high - lastHigh.next = n - lastHigh = n - } - n = next - } - lastLow.next = null - if (old ne preLow.next) table(i) = preLow.next - if (preHigh.next ne null) { - table(i + oldlen) = preHigh.next - lastHigh.next = null - } - } - i += 1 - } - oldlen *= 2 - } - } - } - - override def hashCode: Int = { - val setHashIterator = - if (isEmpty) this.iterator - else { - new LinkedHashSetIterator[Any] { - var hash: Int = 0 - override def hashCode: Int = hash - override def extract(nd: Entry): Any = { - hash = unimproveHash(nd.hash) - this - } - } - } - MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) - } - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "LinkedHashSet" -} - -/** $factoryInfo - * @define Coll `LinkedHashSet` - * @define coll linked hash set - */ -@SerialVersionUID(3L) -object LinkedHashSet extends IterableFactory[LinkedHashSet] { - - override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A] - - def from[sealed E](it: collection.IterableOnce[E]^) = { - val newlhs = empty[E] - newlhs.sizeHint(it.knownSize) - newlhs.addAll(it) - newlhs - } - - def newBuilder[sealed A] = new GrowableBuilder(empty[A]) - - /** Class for the linked hash set entry, used internally. 
- */ - private[mutable] final class Entry[sealed A](val key: A, val hash: Int) { - var earlier: Entry[A] = null - var later: Entry[A] = null - var next: Entry[A] = null - - @tailrec - final def findEntry(k: A, h: Int): Entry[A] = - if (h == hash && k == key) this - else if ((next eq null) || (hash > h)) null - else next.findEntry(k, h) - } - - /** The default load factor for the hash table */ - private[collection] final def defaultLoadFactor: Double = 0.75 - - /** The default initial capacity for the hash table */ - private[collection] final def defaultinitialSize: Int = 16 -} - diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala deleted file mode 100644 index 8ddbc264e47b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ListMap.scala +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.generic.DefaultSerializable -import scala.collection.immutable.List -import language.experimental.captureChecking - -/** A simple mutable map backed by a list, so it preserves insertion order. - * - * @tparam K the type of the keys contained in this list map. - * @tparam V the type of the values assigned to keys in this list map. 
- * - * @define Coll `mutable.ListMap` - * @define coll mutable list map - * @define mayNotTerminateInf - * @define willNotTerminateInf - * @define orderDependent - * @define orderDependentFold - */ -@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -class ListMap[sealed K, sealed V] - extends AbstractMap[K, V] - with MapOps[K, V, ListMap, ListMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] - with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] - with MapFactoryDefaults[K, V, ListMap, Iterable] - with DefaultSerializable { - - override def mapFactory: MapFactory[ListMap] = ListMap - - private[this] var elems: List[(K, V)] = List() - private[this] var siz: Int = 0 - - def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) - def iterator: Iterator[(K, V)] = elems.iterator - - final override def addOne(kv: (K, V)) = { - val (e, key0) = remove(kv._1, elems, List()) - elems = (key0, kv._2) :: e - siz += 1; this - } - - final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } - - @tailrec - private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { - if (elems.isEmpty) (acc, key) - else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } - else remove(key, elems.tail, elems.head :: acc) - } - - final override def clear(): Unit = { elems = List(); siz = 0 } - - final override def size: Int = siz - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - override protected[this] def stringPrefix = "ListMap" -} - -/** $factoryInfo - * @define Coll `mutable.ListMap` - * @define coll mutable list map - */ -@SerialVersionUID(3L) -@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -object ListMap extends MapFactory[ListMap] { - def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V] - def from[sealed K, sealed V](it: 
IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[sealed K, sealed V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) -} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala deleted file mode 100644 index 2c757160ec77..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/LongMap.scala +++ /dev/null @@ -1,674 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.collection.generic.DefaultSerializationProxy -import scala.language.implicitConversions -import language.experimental.captureChecking - -/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. - * - * Basic map operations on single entries, including `contains` and `get`, - * are typically substantially faster with `LongMap` than [[HashMap]]. Methods - * that act on the whole map, including `foreach` and `map` are not in - * general expected to be faster than with a generic map, save for those - * that take particular advantage of the internal structure of the map: - * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. - * - * Maps with open addressing may become less efficient at lookup after - * repeated addition/removal of elements. Although `LongMap` makes a - * decent attempt to remain efficient regardless, calling `repack` - * on a map that will no longer have elements removed but will be - * used heavily may save both time and storage space. - * - * This map is not intended to contain more than 2^29 entries (approximately - * 500 million). 
The maximum capacity is 2^30, but performance will degrade - * rapidly as 2^30 is approached. - * - */ -final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) - extends AbstractMap[Long, V] - with MapOps[Long, V, Map, LongMap[V]] - with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] - with Serializable { - import LongMap._ - - def this() = this(LongMap.exceptionDefault, 16, true) - - // TODO: override clear() with an optimization more tailored for efficiency. - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { - //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? - val b = newSpecificBuilder - b.sizeHint(coll) - b.addAll(coll) - b.result() - } - override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) - - /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) - - /** Creates a new `LongMap` with an initial buffer of specified size. - * - * A LongMap can typically contain half as many elements as its buffer size - * before it requires resizing. - */ - def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) - - /** Creates a new `LongMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) - - private[this] var mask = 0 - private[this] var extraKeys: Int = 0 - private[this] var zeroValue: AnyRef = null - private[this] var minValue: AnyRef = null - private[this] var _size = 0 - private[this] var _vacant = 0 - private[this] var _keys: Array[Long] = null - private[this] var _values: Array[AnyRef] = null - - if (initBlank) defaultInitialize(initialBufferSize) - - private[this] def defaultInitialize(n: Int) = { - mask = - if (n<0) 0x7 - else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 - _keys = new Array[Long](mask+1) - _values = new Array[AnyRef](mask+1) - } - - private[collection] def initializeTo( - m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] - ): Unit = { - mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz - } - - override def size: Int = _size + (extraKeys+1)/2 - override def knownSize: Int = size - override def isEmpty: Boolean = size == 0 - override def empty: LongMap[V] = new LongMap() - - private def imbalanced: Boolean = - (_size + _vacant) > 0.5*mask || _vacant > _size - - private def toIndex(k: Long): Int = { - // Part of the MurmurHash3 32 bit finalizer - val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt - val x = (h ^ (h >>> 16)) * 0x85EBCA6B - (x ^ (x >>> 13)) & mask - } - - private def seekEmpty(k: Long): Int = { - var e = toIndex(k) - var x = 0 - while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e - } - - private def seekEntry(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } - e | MissingBit - } - - private def seekEntryOrOpen(k: Long): Int = { - var e = toIndex(k) - var x = 0 - var q = 0L - while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { - x += 1 - e = 
(e + 2*(x+1)*x - 3) & mask - } - if (q == 0) return e | MissingBit - val o = e | MissVacant - while ({ q = _keys(e); if (q==k) return e; q != 0}) { - x += 1 - e = (e + 2*(x+1)*x - 3) & mask - } - o - } - - override def contains(key: Long): Boolean = { - if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 - else seekEntry(key) >= 0 - } - - override def get(key: Long): Option[V] = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) None - else if (key == 0) Some(zeroValue.asInstanceOf[V]) - else Some(minValue.asInstanceOf[V]) - } - else { - val i = seekEntry(key) - if (i < 0) None else Some(_values(i).asInstanceOf[V]) - } - } - - override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) default - else if (key == 0) zeroValue.asInstanceOf[V1] - else minValue.asInstanceOf[V1] - } - else { - val i = seekEntry(key) - if (i < 0) default else _values(i).asInstanceOf[V1] - } - } - - override def getOrElseUpdate(key: Long, defaultValue: => V): V = { - if (key == -key) { - val kbits = (key>>>63).toInt + 1 - if ((kbits & extraKeys) == 0) { - val value = defaultValue - extraKeys |= kbits - if (key == 0) zeroValue = value.asInstanceOf[AnyRef] - else minValue = value.asInstanceOf[AnyRef] - value - } - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - var i = seekEntryOrOpen(key) - if (i < 0) { - // It is possible that the default value computation was side-effecting - // Our hash table may have resized or even contain what we want now - // (but if it does, we'll replace it) - val value = { - val ok = _keys - val ans = defaultValue - if (ok ne _keys) { - i = seekEntryOrOpen(key) - if (i >= 0) _size -= 1 - } - ans - } - _size += 1 - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - value - } - else _values(i).asInstanceOf[V] - } - } - - 
/** Retrieves the value associated with a key, or the default for that type if none exists - * (null for AnyRef, 0 for floats and integers). - * - * Note: this is the fastest way to retrieve a value that may or - * may not exist, if the default null/zero is acceptable. For key/value - * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. - */ - def getOrNull(key: Long): V = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V] - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - val i = seekEntry(key) - if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V] - } - } - - /** Retrieves the value associated with a key. - * If the key does not exist in the map, the `defaultEntry` for that key - * will be returned instead. - */ - override def apply(key: Long): V = { - if (key == -key) { - if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key) - else if (key == 0) zeroValue.asInstanceOf[V] - else minValue.asInstanceOf[V] - } - else { - val i = seekEntry(key) - if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] - } - } - - /** The user-supplied default value for the key. Throws an exception - * if no other default behavior was specified. - */ - override def default(key: Long) = defaultEntry(key) - - private def repack(newMask: Int): Unit = { - val ok = _keys - val ov = _values - mask = newMask - _keys = new Array[Long](mask+1) - _values = new Array[AnyRef](mask+1) - _vacant = 0 - var i = 0 - while (i < ok.length) { - val k = ok(i) - if (k != -k) { - val j = seekEmpty(k) - _keys(j) = k - _values(j) = ov(i) - } - i += 1 - } - } - - /** Repacks the contents of this `LongMap` for maximum efficiency of lookup. - * - * For maps that undergo a complex creation process with both addition and - * removal of keys, and then are used heavily with no further removal of - * elements, calling `repack` after the end of the creation can result in - * improved performance. 
Repacking takes time proportional to the number - * of entries in the map. - */ - def repack(): Unit = { - var m = mask - if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask - while (m > 8 && 8*_size < m) m = m >>> 1 - repack(m) - } - - override def put(key: Long, value: V): Option[V] = { - if (key == -key) { - if (key == 0) { - val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None - zeroValue = value.asInstanceOf[AnyRef] - extraKeys |= 1 - ans - } - else { - val ans = if ((extraKeys&2) == 1) Some(minValue.asInstanceOf[V]) else None - minValue = value.asInstanceOf[AnyRef] - extraKeys |= 2 - ans - } - } - else { - val i = seekEntryOrOpen(key) - if (i < 0) { - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - None - } - else { - val ans = Some(_values(i).asInstanceOf[V]) - _keys(i) = key - _values(i) = value.asInstanceOf[AnyRef] - ans - } - } - } - - /** Updates the map to include a new key-value pair. - * - * This is the fastest way to add an entry to a `LongMap`. - */ - override def update(key: Long, value: V): Unit = { - if (key == -key) { - if (key == 0) { - zeroValue = value.asInstanceOf[AnyRef] - extraKeys |= 1 - } - else { - minValue = value.asInstanceOf[AnyRef] - extraKeys |= 2 - } - } - else { - val i = seekEntryOrOpen(key) - if (i < 0) { - val j = i & IndexMask - _keys(j) = key - _values(j) = value.asInstanceOf[AnyRef] - _size += 1 - if ((i & VacantBit) != 0) _vacant -= 1 - else if (imbalanced) repack() - } - else { - _keys(i) = key - _values(i) = value.asInstanceOf[AnyRef] - } - } - } - - /** Adds a new key/value pair to this map and returns the map. 
*/ - @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") - def +=(key: Long, value: V): this.type = { update(key, value); this } - - /** Adds a new key/value pair to this map and returns the map. */ - @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } - - @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } - - def subtractOne(key: Long): this.type = { - if (key == -key) { - if (key == 0L) { - extraKeys &= 0x2 - zeroValue = null - } - else { - extraKeys &= 0x1 - minValue = null - } - } - else { - val i = seekEntry(key) - if (i >= 0) { - _size -= 1 - _vacant += 1 - _keys(i) = Long.MinValue - _values(i) = null - } - } - this - } - - def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { - private[this] val kz = _keys - private[this] val vz = _values - - private[this] var nextPair: (Long, V) = - if (extraKeys==0) null - else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) - else (Long.MinValue, minValue.asInstanceOf[V]) - - private[this] var anotherPair: (Long, V) = - if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) - else null - - private[this] var index = 0 - - def hasNext: Boolean = nextPair != null || (index < kz.length && { - var q = kz(index) - while (q == -q) { - index += 1 - if (index >= kz.length) return false - q = kz(index) - } - nextPair = (kz(index), vz(index).asInstanceOf[V]) - index += 1 - true - }) - def next() = { - if (nextPair == null && !hasNext) throw new NoSuchElementException("next") - val ans = nextPair - if (anotherPair != null) { - nextPair = anotherPair - anotherPair = null - } - else nextPair = null - ans - } - } - - // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
- override def keysIterator: Iterator[Long] = super.keysIterator - override def valuesIterator: Iterator[V] = super.valuesIterator - - override def foreach[U](f: ((Long,V)) => U): Unit = { - if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) - if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f((k, _values(i).asInstanceOf[V])) - } - i += 1 - } - } - - override def foreachEntry[U](f: (Long,V) => U): Unit = { - if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(k, _values(i).asInstanceOf[V]) - } - i += 1 - } - } - - override def clone(): LongMap[V] = { - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = java.util.Arrays.copyOf(_values, _values.length) - val lm = new LongMap[V](defaultEntry, 1, false) - lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) - lm - } - - @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - lm += kv - lm - } - - @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { - val m = this + elem1 + elem2 - if(elems.isEmpty) m else m.concat(elems) - } - - override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { - val lm = clone().asInstanceOf[LongMap[V1]] - xs.iterator.foreach(kv => lm += kv) - lm - } - - override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) - - @deprecated("Use m.clone().addOne(k,v) instead of 
m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = - clone().asInstanceOf[LongMap[V1]].addOne(key, value) - - /** Applies a function to all keys of this map. */ - def foreachKey[A](f: Long => A): Unit = { - if ((extraKeys & 1) == 1) f(0L) - if ((extraKeys & 2) == 2) f(Long.MinValue) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(k) - } - i += 1 - } - } - - /** Applies a function to all values of this map. */ - def foreachValue[A](f: V => A): Unit = { - if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) - if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - f(_values(i).asInstanceOf[V]) - } - i += 1 - } - } - - /** Creates a new `LongMap` with different values. - * Unlike `mapValues`, this method generates a new - * collection immediately. - */ - def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = { - val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null - val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) - val kz = java.util.Arrays.copyOf(_keys, _keys.length) - val vz = new Array[AnyRef](_values.length) - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) - lm - } - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. 
- */ - @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") - @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) - - /** Applies a transformation function to all values stored in this map. - * Note: the default, if any, is not transformed. - */ - def transformValuesInPlace(f: V => V): this.type = { - if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] - if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] - var i,j = 0 - while (i < _keys.length & j < _size) { - val k = _keys(i) - if (k != -k) { - j += 1 - _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] - } - i += 1 - } - this - } - - def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - - def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - - def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = - strictOptimizedCollect(LongMap.newBuilder[V2], pf) - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) - - override protected[this] def className = "LongMap" -} - -object LongMap { - private final val IndexMask = 0x3FFFFFFF - private final val MissingBit = 0x80000000 - private final val VacantBit = 0x40000000 - private final val MissVacant = 0xC0000000 - - private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) - - /** A builder for instances of `LongMap`. - * - * This builder can be reused to create multiple instances. 
- */ - final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] { - private[collection] var elems: LongMap[V] = new LongMap[V] - override def addOne(entry: (Long, V)): this.type = { - elems += entry - this - } - def clear(): Unit = elems = new LongMap[V] - def result(): LongMap[V] = elems - override def knownSize: Int = elems.knownSize - } - - /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) - - private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { - var sz = elems.knownSize - if(sz < 0) sz = 4 - val lm = new LongMap[V](sz * 2) - elems.iterator.foreach{ case (k,v) => lm(k) = v } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new empty `LongMap`. */ - def empty[sealed V]: LongMap[V] = new LongMap[V] - - /** Creates a new empty `LongMap` with the supplied default */ - def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default) - - /** Creates a new `LongMap` from an existing source collection. A source collection - * which is already a `LongMap` gets cloned. - * - * @param source Source collection - * @tparam A the type of the collection’s elements - * @return a new `LongMap` with the elements of `source` - */ - def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { - case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] - case _ => buildFromIterableOnce(source) - } - - def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] - - /** Creates a new `LongMap` from arrays of keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
- */ - def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = { - val sz = math.min(keys.length, values.length) - val lm = new LongMap[V](sz * 2) - var i = 0 - while (i < sz) { lm(keys(i)) = values(i); i += 1 } - if (lm.size < (sz>>3)) lm.repack() - lm - } - - /** Creates a new `LongMap` from keys and values. - * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. - */ - def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { - val sz = math.min(keys.size, values.size) - val lm = new LongMap[V](sz * 2) - val ki = keys.iterator - val vi = values.iterator - while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() - if (lm.size < (sz >> 3)) lm.repack() - lm - } - - implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] - - @SerialVersionUID(3L) - private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) - def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] - } - - implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] - private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) - def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] - } - - implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this) - implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala deleted file mode 100644 index 
dab64ddb1f58..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Map.scala +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import language.experimental.captureChecking - -/** Base type of mutable Maps */ -trait Map[K, V] - extends Iterable[(K, V)] - with collection.Map[K, V] - with MapOps[K, V, Map, Map[K, V]] - with Growable[(K, V)] - with Shrinkable[K] - with MapFactoryDefaults[K, V, Map, Iterable] { - - override def mapFactory: scala.collection.MapFactory[Map] = Map - - /* - //TODO consider keeping `remove` because it returns the removed entry - @deprecated("Use subtract or -= instead of remove", "2.13.0") - def remove(key: K): Option[V] = { - val old = get(key) - if(old.isDefined) subtract(key) - old - } - */ - - /** The same map with a given default function. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) -} - -/** - * @define coll mutable map - * @define Coll `mutable.Map` - */ -trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] - extends IterableOps[(K, V), Iterable, C] - with collection.MapOps[K, V, CC, C] - with Cloneable[C] - with Builder[(K, V), C] - with Growable[(K, V)] - with Shrinkable[K] - with Pure { - - def result(): C = coll - - @deprecated("Use - or remove on an immutable Map", "2.13.0") - final def - (key: K): C = clone() -= key - - @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") - final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys - - /** Adds a new key/value pair to this map and optionally returns previously bound value. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key the key to update - * @param value the new value - * @return an option value containing the value associated with the key - * before the `put` operation was executed, or `None` if `key` - * was not defined in the map before. - */ - def put(key: K, value: V): Option[V] = { - val r = get(key) - update(key, value) - r - } - - /** Adds a new key/value pair to this map. - * If the map already contains a - * mapping for the key, it will be overridden by the new value. - * - * @param key The key to update - * @param value The new value - */ - def update(key: K, value: V): Unit = { coll += ((key, value)) } - - /** - * Update a mapping for the specified key and its current optionally-mapped value - * (`Some` if there is current mapping, `None` if not). - * - * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. - * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). 
- * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. - * - * @param key the key value - * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping - * @return the new value associated with the specified key - */ - def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = this.get(key) - val nextValue = remappingFunction(previousValue) - (previousValue, nextValue) match { - case (None, None) => // do nothing - case (Some(_), None) => this.remove(key) - case (_, Some(v)) => this.update(key,v) - } - nextValue - } - - /** If given key is already in this map, returns associated value. - * - * Otherwise, computes value from given expression `op`, stores with key - * in map and returns that value. - * - * Concurrent map implementations may evaluate the expression `op` - * multiple times, or may evaluate `op` without inserting the result. - * - * @param key the key to test - * @param op the computation yielding the value to associate with `key`, if - * `key` is previously unbound. - * @return the value associated with key (either previously or as a result - * of executing the method). - */ - def getOrElseUpdate(key: K, op: => V): V = - get(key) match { - case Some(v) => v - case None => val d = op; this(key) = d; d - } - - /** Removes a key from this map, returning the value associated previously - * with that key as an option. - * @param key the key to be removed - * @return an option value containing the value associated previously with `key`, - * or `None` if `key` was not defined in the map before. 
- */ - def remove(key: K): Option[V] = { - val r = get(key) - if (r.isDefined) this -= key - r - } - - def clear(): Unit = { keysIterator foreach -= } - - override def clone(): C = empty ++= this - - @deprecated("Use filterInPlace instead", "2.13.0") - @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) - - /** Retains only those mappings for which the predicate - * `p` returns `true`. - * - * @param p The test predicate - */ - def filterInPlace(p: (K, V) => Boolean): this.type = { - if (!isEmpty) this match { - case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) - case _ => - val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = array(i).asInstanceOf[(K, V)] - if (!p(k, v)) { - this -= k - } - i += 1 - } - } - this - } - - @deprecated("Use mapValuesInPlace instead", "2.13.0") - @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) - - /** Applies a transformation function to all values contained in this map. - * The transformation function produces new values from existing keys - * associated values. - * - * @param f the transformation to apply - * @return the map itself. 
- */ - def mapValuesInPlace(f: (K, V) => V): this.type = { - if (!isEmpty) this match { - case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) - case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) - case _ => - val array = this.toArray[Any] - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = array(i).asInstanceOf[(K, V)] - update(k, f(k, v)) - i += 1 - } - } - this - } - - @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") - def updated[V1 >: V](key: K, value: V1): CC[K, V1] = - clone().asInstanceOf[CC[K, V1]].addOne((key, value)) - - override def knownSize: Int = super[IterableOps].knownSize -} - -/** - * $factoryInfo - * @define coll mutable map - * @define Coll `mutable.Map` - */ -@SerialVersionUID(3L) -object Map extends MapFactory.Delegate[Map](HashMap) { - - @SerialVersionUID(3L) - class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) - extends AbstractMap[K, V] - with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { - - override def default(key: K): V = defaultValue(key) - - def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator - override def isEmpty: Boolean = underlying.isEmpty - override def knownSize: Int = underlying.knownSize - override def mapFactory: MapFactory[Map] = underlying.mapFactory - - override def clear(): Unit = underlying.clear() - - def get(key: K): Option[V] = underlying.get(key) - - def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } - - def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = - underlying.concat(suffix).withDefault(defaultValue) - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override protected def fromSpecific(coll: 
scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = - new WithDefault[K, V](mapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = - Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) - } - -} - -/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala deleted file mode 100644 index 281631c92298..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.mutable - -import language.experimental.captureChecking - -/** A trait for mutable maps with multiple values assigned to a key. - * - * This class is typically used as a mixin. It turns maps which map `K` - * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
- * - * @example {{{ - * // first import all necessary types from package `collection.mutable` - * import collection.mutable.{ HashMap, MultiMap, Set } - * - * // to create a `MultiMap` the easiest way is to mixin it into a normal - * // `Map` instance - * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] - * - * // to add key-value pairs to a multimap it is important to use - * // the method `addBinding` because standard methods like `+` will - * // overwrite the complete key-value pair instead of adding the - * // value to the existing key - * mm.addBinding(1, "a") - * mm.addBinding(2, "b") - * mm.addBinding(1, "c") - * - * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` - * - * // to check if the multimap contains a value there is method - * // `entryExists`, which allows to traverse the including set - * mm.entryExists(1, _ == "a") == true - * mm.entryExists(1, _ == "b") == false - * mm.entryExists(2, _ == "b") == true - * - * // to remove a previous added value there is the method `removeBinding` - * mm.removeBinding(1, "a") - * mm.entryExists(1, _ == "a") == false - * }}} - * - * @define coll multimap - * @define Coll `MultiMap` - */ -@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") -trait MultiMap[K, sealed V] extends Map[K, Set[V]] { - /** Creates a new set. - * - * Classes that use this trait as a mixin can override this method - * to have the desired implementation of sets assigned to new keys. - * By default this is `HashSet`. - * - * @return An empty set of values of type `V`. - */ - protected def makeSet: Set[V] = new HashSet[V] - - /** Assigns the specified `value` to a specified `key`. If the key - * already has a binding to equal to `value`, nothing is changed; - * otherwise a new binding is added for that `key`. - * - * @param key The key to which to bind the new value. - * @param value The value to bind to the key. - * @return A reference to this multimap. 
- */ - def addBinding(key: K, value: V): this.type = { - get(key) match { - case None => - val set = makeSet - set += value - this(key) = set - case Some(set) => - set += value - } - this - } - - /** Removes the binding of `value` to `key` if it exists, otherwise this - * operation doesn't have any effect. - * - * If this was the last value assigned to the specified key, the - * set assigned to that key will be removed as well. - * - * @param key The key of the binding. - * @param value The value to remove. - * @return A reference to this multimap. - */ - def removeBinding(key: K, value: V): this.type = { - get(key) match { - case None => - case Some(set) => - set -= value - if (set.isEmpty) this -= key - } - this - } - - /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. - * - * @param key The key for which the predicate is checked. - * @param p The predicate which a value assigned to the key must satisfy. - * @return A boolean if such a binding exists - */ - def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { - case None => false - case Some(set) => set exists p - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala deleted file mode 100644 index f1deb25b6a8a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection -package mutable - -import java.lang.Integer.numberOfLeadingZeros -import java.util.ConcurrentModificationException -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking - -/** - * @define Coll `OpenHashMap` - * @define coll open hash map - */ -@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -@SerialVersionUID(3L) -object OpenHashMap extends MapFactory[OpenHashMap] { - - def empty[sealed K, sealed V] = new OpenHashMap[K, V] - def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it - - def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] = - new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) - - /** A hash table entry. - * - * The entry is occupied if and only if its `value` is a `Some`; - * deleted if and only if its `value` is `None`. - * If its `key` is not the default value of type `Key`, the entry is occupied. - * If the entry is occupied, `hash` contains the hash value of `key`. - */ - final private class OpenEntry[sealed Key, sealed Value](var key: Key, - var hash: Int, - var value: Option[Value]) - - private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) -} - -/** A mutable hash map based on an open addressing method. The precise scheme is - * undefined, but it should make a reasonable effort to ensure that an insert - * with consecutive hash codes is not unnecessarily penalised. In particular, - * mappings of consecutive integer keys should work without significant - * performance loss. - * - * @tparam Key type of the keys in this map. - * @tparam Value type of the values in this map. - * @param initialSize the initial size of the internal hash table. 
- * - * @define Coll `OpenHashMap` - * @define coll open hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -class OpenHashMap[sealed Key, sealed Value](initialSize : Int) - extends AbstractMap[Key, Value] - with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] - with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] - with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] - with DefaultSerializable { - - import OpenHashMap.OpenEntry - private type Entry = OpenEntry[Key, Value] - - /** A default constructor creates a hashmap with initial size `8`. - */ - def this() = this(8) - - override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap - - private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) - - private[this] var mask = actualInitialSize - 1 - - /** The hash table. - * - * The table's entries are initialized to `null`, indication of an empty slot. - * A slot is either deleted or occupied if and only if the entry is non-`null`. - */ - private[this] var table = new Array[Entry](actualInitialSize) - - private[this] var _size = 0 - private[this] var deleted = 0 - - // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. - private[this] var modCount = 0 - - override def size = _size - override def knownSize: Int = size - private[this] def size_=(s : Int): Unit = _size = s - override def isEmpty: Boolean = _size == 0 - /** Returns a mangled hash code of the provided key. */ - protected def hashOf(key: Key) = { - var h = key.## - h ^= ((h >>> 20) ^ (h >>> 12)) - h ^ (h >>> 7) ^ (h >>> 4) - } - - /** Increase the size of the table. - * Copy only the occupied slots, effectively eliminating the deleted slots. 
- */ - private[this] def growTable() = { - val oldSize = mask + 1 - val newSize = 4 * oldSize - val oldTable = table - table = new Array[Entry](newSize) - mask = newSize - 1 - oldTable.foreach( entry => - if (entry != null && entry.value != None) - table(findIndex(entry.key, entry.hash)) = entry ) - deleted = 0 - } - - /** Return the index of the first slot in the hash table (in probe order) - * that is, in order of preference, either occupied by the given key, deleted, or empty. - * - * @param hash hash value for `key` - */ - private[this] def findIndex(key: Key, hash: Int): Int = { - var index = hash & mask - var j = 0 - - // Index of the first slot containing a deleted entry, or -1 if none found yet - var firstDeletedIndex = -1 - - var entry = table(index) - while (entry != null) { - if (entry.hash == hash && entry.key == key && entry.value != None) - return index - - if (firstDeletedIndex == -1 && entry.value == None) - firstDeletedIndex = index - - j += 1 - index = (index + j) & mask - entry = table(index) - } - - if (firstDeletedIndex == -1) index else firstDeletedIndex - } - - // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. 
- override def update(key: Key, value: Value): Unit = put(key, value) - - @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") - def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } - - @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") - def subtractOne (key: Key): this.type = { remove(key); this } - - override def put(key: Key, value: Value): Option[Value] = - put(key, hashOf(key), value) - - private def put(key: Key, hash: Int, value: Value): Option[Value] = { - if (2 * (size + deleted) > mask) growTable() - val index = findIndex(key, hash) - val entry = table(index) - if (entry == null) { - table(index) = new OpenEntry(key, hash, Some(value)) - modCount += 1 - size += 1 - None - } else { - val res = entry.value - if (entry.value == None) { - entry.key = key - entry.hash = hash - size += 1 - deleted -= 1 - modCount += 1 - } - entry.value = Some(value) - res - } - } - - /** Delete the hash table slot contained in the given entry. */ - @`inline` - private[this] def deleteSlot(entry: Entry) = { - entry.key = null.asInstanceOf[Key] - entry.hash = 0 - entry.value = None - - size -= 1 - deleted += 1 - } - - override def remove(key : Key): Option[Value] = { - val entry = table(findIndex(key, hashOf(key))) - if (entry != null && entry.value != None) { - val res = entry.value - deleteSlot(entry) - res - } else None - } - - def get(key : Key) : Option[Value] = { - val hash = hashOf(key) - var index = hash & mask - var entry = table(index) - var j = 0 - while(entry != null){ - if (entry.hash == hash && - entry.key == key){ - return entry.value - } - - j += 1 - index = (index + j) & mask - entry = table(index) - } - None - } - - /** An iterator over the elements of this map. Use of this iterator follows - * the same contract for concurrent modification as the foreach method. 
- * - * @return the iterator - */ - def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { - override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) - } - - override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { - override protected def nextResult(node: Entry): Key = node.key - } - override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { - override protected def nextResult(node: Entry): Value = node.value.get - } - - private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { - private[this] var index = 0 - private[this] val initialModCount = modCount - - private[this] def advance(): Unit = { - if (initialModCount != modCount) throw new ConcurrentModificationException - while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 - } - - def hasNext = {advance(); index <= mask } - - def next() = { - advance() - val result = table(index) - index += 1 - nextResult(result) - } - protected def nextResult(node: Entry): A - } - - override def clone() = { - val it = new OpenHashMap[Key, Value] - foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) - it - } - - /** Loop over the key, value mappings of this map. - * - * The behaviour of modifying the map during an iteration is as follows: - * - Deleting a mapping is always permitted. - * - Changing the value of mapping which is already present is permitted. - * - Anything else is not permitted. It will usually, but not always, throw an exception. - * - * @tparam U The return type of the specified function `f`, return result of which is ignored. - * @param f The function to apply to each key, value mapping. 
- */ - override def foreach[U](f : ((Key, Value)) => U): Unit = { - val startModCount = modCount - foreachUndeletedEntry(entry => { - if (modCount != startModCount) throw new ConcurrentModificationException - f((entry.key, entry.value.get))} - ) - } - override def foreachEntry[U](f : (Key, Value) => U): Unit = { - val startModCount = modCount - foreachUndeletedEntry(entry => { - if (modCount != startModCount) throw new ConcurrentModificationException - f(entry.key, entry.value.get)} - ) - } - - private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { - table.foreach(entry => if (entry != null && entry.value != None) f(entry)) - } - - override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { - foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) - this - } - - override def filterInPlace(f : (Key, Value) => Boolean): this.type = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) - this - } - - override protected[this] def stringPrefix = "OpenHashMap" -} diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala deleted file mode 100644 index a395fac4a44a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala +++ /dev/null @@ -1,403 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.collection.generic.DefaultSerializationProxy -import scala.math.Ordering -import language.experimental.captureChecking - -/** A heap-based priority queue. - * - * To prioritize elements of type `A` there must be an implicit - * `Ordering[A]` available at creation. 
Elements are retrieved - * in priority order by using [[dequeue]] or [[dequeueAll]]. - * - * If multiple elements have the same priority as determined by the ordering for this - * `PriorityQueue`, no guarantees are made regarding the order in which those elements - * are returned by `dequeue` or `dequeueAll`. In particular, that means this - * class does not guarantee first-in-first-out behavior, as may be - * incorrectly inferred from the fact that this data structure is - * called a "queue". - * - * Only the `dequeue` and `dequeueAll` methods will return elements in priority - * order (while removing elements from the heap). Standard collection methods - * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary - * iteration order: they will traverse the heap or remove elements - * in whichever order seems most convenient. - * - * Therefore, printing a `PriorityQueue` will not show elements in priority order, - * though the highest-priority element will be printed first. - * To print the elements in order, it's necessary to `dequeue` them. - * To do this non-destructively, duplicate the `PriorityQueue` first; - * the `clone` method is a suitable way to obtain a disposable copy. - * - * Client keys are assumed to be immutable. Mutating keys may violate - * the invariant of the underlying heap-ordered tree. Note that [[clone]] - * does not rebuild the underlying tree. - * - * {{{ - * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) - * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) - * - * scala> pq.toList // also not in order - * val res0: List[Int] = List(7, 3, 5, 1, 2) - * - * scala> pq.clone.dequeueAll - * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) - * }}} - * - * @tparam A type of the elements in this priority queue. - * @param ord implicit ordering used to compare the elements of type `A`. 
- * - * @define Coll PriorityQueue - * @define coll priority queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A]) - extends AbstractIterable[A] - with Iterable[A] - with IterableOps[A, Iterable, PriorityQueue[A]] - with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] - with Builder[A, PriorityQueue[A]] - with Cloneable[PriorityQueue[A]] - with Growable[A] - with Serializable -{ - - private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] { - override def mapInPlace(f: A0 => A0): this.type = { - var i = 1 // see "we do not use array(0)" comment below (???) - val siz = this.size - while (i < siz) { this(i) = f(this(i)); i += 1 } - this - } - - def p_size0 = size0 - def p_size0_=(s: Int) = size0 = s - def p_array = array - def p_ensureSize(n: Int) = super.ensureSize(n) - def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) - def p_swap(a: Int, b: Int): Unit = { - val h = array(a) - array(a) = array(b) - array(b) = h - } - } - - private val resarr = new ResizableArrayAccess[A] - - resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? 
- def length: Int = resarr.length - 1 // adjust length accordingly - override def size: Int = length - override def knownSize: Int = length - override def isEmpty: Boolean = resarr.p_size0 < 2 - - // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) - override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) - override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder - override def empty: PriorityQueue[A] = PriorityQueue.empty - - def mapInPlace(f: A => A): this.type = { - resarr.mapInPlace(f) - heapify(1) - this - } - - def result() = this - - private def toA(x: AnyRef): A = x.asInstanceOf[A] - protected def fixUp(as: Array[AnyRef], m: Int): Unit = { - var k: Int = m - // use `ord` directly to avoid allocating `OrderingOps` - while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { - resarr.p_swap(k, k / 2) - k = k / 2 - } - } - - protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { - // returns true if any swaps were done (used in heapify) - var k: Int = m - while (n >= 2 * k) { - var j = 2 * k - // use `ord` directly to avoid allocating `OrderingOps` - if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) - j += 1 - if (ord.gteq(toA(as(k)), toA(as(j)))) - return k != m - else { - val h = as(k) - as(k) = as(j) - as(j) = h - k = j - } - } - k != m - } - - /** Inserts a single element into the priority queue. - * - * @param elem the element to insert. - * @return this $coll. 
- */ - def addOne(elem: A): this.type = { - resarr.p_ensureAdditionalSize(1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - fixUp(resarr.p_array, resarr.p_size0) - resarr.p_size0 += 1 - this - } - - override def addAll(xs: IterableOnce[A]^): this.type = { - val from = resarr.p_size0 - for (x <- xs.iterator) unsafeAdd(x) - heapify(from) - this - } - - private def unsafeAdd(elem: A): Unit = { - // like += but skips fixUp, which breaks the ordering invariant - // a series of unsafeAdds MUST be followed by heapify - resarr.p_ensureAdditionalSize(1) - resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] - resarr.p_size0 += 1 - } - - private def heapify(from: Int): Unit = { - // elements at indices 1..from-1 were already in heap order before any adds - // elements at indices from..n are newly added, their order must be fixed - val n = length - - if (from <= 2) { - // no pre-existing order to maintain, do the textbook heapify algorithm - for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) - } - else if (n - from < 4) { - // for very small adds, doing the simplest fix is faster - for (i <- from to n) fixUp(resarr.p_array, i) - } - else { - var min = from/2 // tracks the minimum element in the queue - val queue = scala.collection.mutable.Queue[Int](min) - - // do fixDown on the parents of all the new elements - // except the parent of the first new element, which is in the queue - // (that parent is treated specially because it might be the root) - for (i <- n/2 until min by -1) { - if (fixDown(resarr.p_array, i, n)) { - // there was a swap, so also need to fixDown i's parent - val parent = i/2 - if (parent < min) { // make sure same parent isn't added twice - min = parent - queue += parent - } - } - } - - while (queue.nonEmpty) { - val i = queue.dequeue() - if (fixDown(resarr.p_array, i, n)) { - val parent = i/2 - if (parent < min && parent > 0) { - // the "parent > 0" is to avoid adding the parent of the root - min = parent - queue += parent - } 
- } - } - } - } - - /** Adds all elements provided by a `IterableOnce` object - * into the priority queue. - * - * @param xs a iterable object. - * @return a new priority queue containing elements of both `xs` and `this`. - */ - def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } - - /** Adds all elements to the queue. - * - * @param elems the elements to add. - */ - def enqueue(elems: A*): Unit = { this ++= elems } - - /** Returns the element with the highest priority in the queue, - * and removes this element from the queue. - * - * @throws NoSuchElementException - * @return the element with the highest priority. - */ - def dequeue(): A = - if (resarr.p_size0 > 1) { - resarr.p_size0 = resarr.p_size0 - 1 - val result = resarr.p_array(1) - resarr.p_array(1) = resarr.p_array(resarr.p_size0) - resarr.p_array(resarr.p_size0) = null // erase reference from array - fixDown(resarr.p_array, 1, resarr.p_size0 - 1) - toA(result) - } else - throw new NoSuchElementException("no element to remove from heap") - - def dequeueAll[A1 >: A]: immutable.Seq[A1] = { - val b = ArrayBuilder.make[Any] - b.sizeHint(size) - while (nonEmpty) { - b += dequeue() - } - immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] - } - - /** Returns the element with the highest priority in the queue, - * or throws an error if there is no element contained in the queue. - * - * @return the element with the highest priority. - */ - override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") - - /** Removes all elements from the queue. After this operation is completed, - * the queue will be empty. - */ - def clear(): Unit = { - resarr.clear() - resarr.p_size0 = 1 - } - - /** Returns an iterator which yields all the elements. - * - * Note: The order of elements returned is undefined. - * If you want to traverse the elements in priority queue - * order, use `clone().dequeueAll.iterator`. 
- * - * @return an iterator over all the elements. - */ - override def iterator: Iterator[A] = resarr.iterator.drop(1) - - /** Returns the reverse of this priority queue. The new priority queue has - * the same elements as the original, but the opposite ordering. - * - * For example, the element with the highest priority in `pq` has the lowest - * priority in `pq.reverse`, and vice versa. - * - * Ties are handled arbitrarily. Elements with equal priority may or - * may not be reversed with respect to each other. - * - * @return the reversed priority queue. - */ - def reverse: PriorityQueue[A] = { - val revq = new PriorityQueue[A]()(ord.reverse) - // copy the existing data into the new array backwards - // this won't put it exactly into the correct order, - // but will require less fixing than copying it in - // the original order - val n = resarr.p_size0 - revq.resarr.p_ensureSize(n) - revq.resarr.p_size0 = n - val from = resarr.p_array - val to = revq.resarr.p_array - for (i <- 1 until n) to(i) = from(n-i) - revq.heapify(1) - revq - } - - - /** Returns an iterator which yields all the elements in the reverse order - * than that returned by the method `iterator`. - * - * Note: The order of elements returned is undefined. - * - * @return an iterator over all elements sorted in descending order. - */ - def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private[this] var i = resarr.p_size0 - 1 - def hasNext: Boolean = i >= 1 - def next(): A = { - val n = resarr.p_array(i) - i -= 1 - toA(n) - } - } - - /** Returns a regular queue containing the same elements. - * - * Note: the order of elements is undefined. - */ - def toQueue: Queue[A] = new Queue[A] ++= this.iterator - - /** Returns a textual representation of a queue as a string. - * - * @return the string representation of this queue. - */ - override def toString() = toList.mkString("PriorityQueue(", ", ", ")") - - /** Converts this $coll to a list. - * - * Note: the order of elements is undefined. 
- * - * @return a list containing all elements of this $coll. - */ - override def toList: immutable.List[A] = immutable.List.from(this.iterator) - - /** This method clones the priority queue. - * - * @return a priority queue with the same elements. - */ - override def clone(): PriorityQueue[A] = { - val pq = new PriorityQueue[A] - val n = resarr.p_size0 - pq.resarr.p_ensureSize(n) - java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) - pq.resarr.p_size0 = n - pq - } - - override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { - val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) - if (copied > 0) { - Array.copy(resarr.p_array, 1, xs, start, copied) - } - copied - } - - @deprecated("Use `PriorityQueue` instead", "2.13.0") - def orderedCompanion: PriorityQueue.type = PriorityQueue - - protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) - - override protected[this] def className = "PriorityQueue" -} - - -@SerialVersionUID(3L) -object PriorityQueue extends SortedIterableFactory[PriorityQueue] { - def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = { - new Builder[A, PriorityQueue[A]] { - val pq = new PriorityQueue[A] - def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } - def result(): PriorityQueue[A] = { pq.heapify(1); pq } - def clear(): Unit = pq.clear() - } - } - - def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - - def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { - val b = newBuilder[E] - b ++= it - b.result() - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala deleted file mode 100644 index a578b0742009..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Queue.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * 
Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable -import language.experimental.captureChecking - - -/** `Queue` objects implement data structures that allow to - * insert and retrieve elements in a first-in-first-out (FIFO) manner. - * - * @define Coll `mutable.Queue` - * @define coll mutable queue - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) - extends ArrayDeque[A](array, start, end) - with IndexedSeqOps[A, Queue, Queue[A]] - with StrictOptimizedSeqOps[A, Queue, Queue[A]] - with IterableFactoryDefaults[A, Queue] - with ArrayDequeOps[A, Queue, Queue[A]] - with Cloneable[Queue[A]] - with DefaultSerializable { - - def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = - this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - - override def iterableFactory: SeqFactory[Queue] = Queue - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "Queue" - - /** - * Add elements to the end of this queue - * - * @param elem - * @return this - */ - def enqueue(elem: A): this.type = this += elem - - /** Enqueue two or more elements at the end of the queue. The last element - * of the sequence will be on end of the queue. - * - * @param elems the element sequence. - * @return this - */ - def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems) - - /** Enqueues all elements in the given iterable object into the queue. 
The - * last element in the iterable object will be on front of the new queue. - * - * @param elems the iterable object. - * @return this - */ - def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems - - /** - * Removes the first element from this queue and returns it - * - * @return - * @throws NoSuchElementException when queue is empty - */ - def dequeue(): A = removeHead() - - /** Returns the first element in the queue which satisfies the - * given predicate, and removes this element from the queue. - * - * @param p the predicate used for choosing the first element - * @return the first element of the queue for which p yields true - */ - def dequeueFirst(p: A => Boolean): Option[A] = - removeFirst(p) - - /** Returns all elements in the queue which satisfy the - * given predicate, and removes those elements from the queue. - * - * @param p the predicate used for choosing elements - * @return a sequence of all elements in the queue for which - * p yields true. - */ - def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] = - removeAll(p) - - /** - * Returns and dequeues all elements from the queue which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return The removed elements - */ - def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) - - /** Returns the first element in the queue, or throws an error if there - * is no element contained in the queue. - * - * @return the first element. 
- */ - @`inline` final def front: A = head - - override protected def klone(): Queue[A] = { - val bf = newSpecificBuilder - bf ++= this - bf.result() - } - - override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = - new Queue(array, start = 0, end) - -} - -/** - * $factoryInfo - * @define coll queue - * @define Coll `Queue` - */ -@SerialVersionUID(3L) -object Queue extends StrictOptimizedSeqFactory[Queue] { - - def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source - - def empty[sealed A]: Queue[A] = new Queue - - def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala deleted file mode 100644 index 1f320f832cdf..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala +++ /dev/null @@ -1,653 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import scala.annotation.tailrec -import collection.{AbstractIterator, Iterator} -import java.lang.String -import language.experimental.captureChecking - -/** - * An object containing the red-black tree implementation used by mutable `TreeMaps`. - * - * The trees implemented in this object are *not* thread safe. - */ -private[collection] object RedBlackTree { - - // ---- class structure ---- - - // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. - // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. 
- // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) - // on the size of the range. - - final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) { - def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) - } - - final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { - override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" - } - - object Tree { - def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0) - } - - object Node { - - @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean, - left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, left, right, parent) - - @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = - new Node(key, value, red, null, null, parent) - - def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) - } - - // ---- getters ---- - - def isRed(node: Node[_, _]) = (node ne null) && node.red - def isBlack(node: Node[_, _]) = (node eq null) || !node.red - - // ---- size ---- - - def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) - def size(tree: Tree[_, _]): Int = tree.size - def isEmpty(tree: Tree[_, _]) = tree.root eq null - def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } - - // ---- search ---- - - def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { - case null => None - case node => Some(node.value) - } - - @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = - if (node eq null) null - else { - val cmp = ord.compare(key, node.key) - if (cmp < 0) getNode(node.left, key) - else if (cmp > 0) 
getNode(node.right, key) - else node - } - - def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null - - def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def minNode[A, B](node: Node[A, B]): Node[A, B] = - if (node eq null) null else minNodeNonNull(node) - - @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.left eq null) node else minNodeNonNull(node.left) - - def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { - case null => None - case node => Some(node.key) - } - - private def maxNode[A, B](node: Node[A, B]): Node[A, B] = - if (node eq null) null else maxNodeNonNull(node) - - @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = - if (node.right eq null) node else maxNodeNonNull(node.right) - - /** - * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such - * node. 
- */ - def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - minNodeAfter(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp <= 0) y else successor(y) - } - } - - /** - * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. - */ - def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some((node.key, node.value)) - } - - def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = - maxNodeBefore(tree.root, key) match { - case null => None - case node => Some(node.key) - } - - private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { - if (node eq null) null - else { - var y: Node[A, B] = null - var x = node - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - if (cmp > 0) y else predecessor(y) - } - } - - // ---- insertion ---- - - def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { - var y: Node[A, B] = null - var x = tree.root - var cmp = 1 - while ((x ne null) && cmp != 0) { - y = x - cmp = ord.compare(key, x.key) - x = if (cmp < 0) x.left else x.right - } - - if (cmp == 0) y.value = value - else { - val z = 
Node.leaf(key, value, red = true, y) - - if (y eq null) tree.root = z - else if (cmp < 0) y.left = z - else y.right = z - - fixAfterInsert(tree, z) - tree.size += 1 - } - } - - private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { - var z = node - while (isRed(z.parent)) { - if (z.parent eq z.parent.parent.left) { - val y = z.parent.parent.right - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.right) { - z = z.parent - rotateLeft(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateRight(tree, z.parent.parent) - } - } else { // symmetric cases - val y = z.parent.parent.left - if (isRed(y)) { - z.parent.red = false - y.red = false - z.parent.parent.red = true - z = z.parent.parent - } else { - if (z eq z.parent.left) { - z = z.parent - rotateRight(tree, z) - } - z.parent.red = false - z.parent.parent.red = true - rotateLeft(tree, z.parent.parent) - } - } - } - tree.root.red = false - } - - // ---- deletion ---- - - def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { - val z = getNode(tree.root, key) - if (z ne null) { - var y = z - var yIsRed = y.red - var x: Node[A, B] = null - var xParent: Node[A, B] = null - - if (z.left eq null) { - x = z.right - transplant(tree, z, z.right) - xParent = z.parent - } - else if (z.right eq null) { - x = z.left - transplant(tree, z, z.left) - xParent = z.parent - } - else { - y = minNodeNonNull(z.right) - yIsRed = y.red - x = y.right - - if (y.parent eq z) xParent = y - else { - xParent = y.parent - transplant(tree, y, y.right) - y.right = z.right - y.right.parent = y - } - transplant(tree, z, y) - y.left = z.left - y.left.parent = y - y.red = z.red - } - - if (!yIsRed) fixAfterDelete(tree, x, xParent) - tree.size -= 1 - } - } - - private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { - var x = node - var xParent = 
parent - while ((x ne tree.root) && isBlack(x)) { - if (x eq xParent.left) { - var w = xParent.right - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - rotateLeft(tree, xParent) - w = xParent.right - } - if (isBlack(w.left) && isBlack(w.right)) { - w.red = true - x = xParent - } else { - if (isBlack(w.right)) { - w.left.red = false - w.red = true - rotateRight(tree, w) - w = xParent.right - } - w.red = xParent.red - xParent.red = false - w.right.red = false - rotateLeft(tree, xParent) - x = tree.root - } - } else { // symmetric cases - var w = xParent.left - // assert(w ne null) - - if (w.red) { - w.red = false - xParent.red = true - rotateRight(tree, xParent) - w = xParent.left - } - if (isBlack(w.right) && isBlack(w.left)) { - w.red = true - x = xParent - } else { - if (isBlack(w.left)) { - w.right.red = false - w.red = true - rotateLeft(tree, w) - w = xParent.left - } - w.red = xParent.red - xParent.red = false - w.left.red = false - rotateRight(tree, xParent) - x = tree.root - } - } - xParent = x.parent - } - if (x ne null) x.red = false - } - - // ---- helpers ---- - - /** - * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, - * therefore, the last node), this method returns `null`. - */ - private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { - if (node.right ne null) minNodeNonNull(node.right) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.right)) { - x = y - y = y.parent - } - y - } - } - - /** - * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, - * therefore, the first node), this method returns `null`. 
- */ - private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { - if (node.left ne null) maxNodeNonNull(node.left) - else { - var x = node - var y = x.parent - while ((y ne null) && (x eq y.left)) { - x = y - y = y.parent - } - y - } - } - - private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { - // assert(x.right ne null) - val y = x.right - x.right = y.left - - if (y.left ne null) y.left.parent = x - y.parent = x.parent - - if (x.parent eq null) tree.root = y - else if (x eq x.parent.left) x.parent.left = y - else x.parent.right = y - - y.left = x - x.parent = y - } - - private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { - // assert(x.left ne null) - val y = x.left - x.left = y.right - - if (y.right ne null) y.right.parent = x - y.parent = x.parent - - if (x.parent eq null) tree.root = y - else if (x eq x.parent.right) x.parent.right = y - else x.parent.left = y - - y.right = x - x.parent = y - } - - /** - * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous - * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
- */ - private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { - if (to.parent eq null) tree.root = from - else if (to eq to.parent.left) to.parent.left = from - else to.parent.right = from - - if (from ne null) from.parent = to.parent - } - - // ---- tree traversal ---- - - def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) - - private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = - if (node ne null) foreachNodeNonNull(node, f) - - private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { - if (node.left ne null) foreachNodeNonNull(node.left, f) - f((node.key, node.value)) - if (node.right ne null) foreachNodeNonNull(node.right, f) - } - - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { - def g(node: Node[A, _]): Unit = { - val l = node.left - if(l ne null) g(l) - f(node.key) - val r = node.right - if(r ne null) g(r) - } - val r = tree.root - if(r ne null) g(r) - } - - def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { - def g(node: Node[A, B]): Unit = { - val l = node.left - if(l ne null) g(l) - f(node.key, node.value) - val r = node.right - if(r ne null) g(r) - } - val r = tree.root - if(r ne null) g(r) - } - - def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) - - private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = - if (node ne null) transformNodeNonNull(node, f) - - private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { - if (node.left ne null) transformNodeNonNull(node.left, f) - node.value = f(node.key, node.value) - if (node.right ne null) transformNodeNonNull(node.right, f) - } - - def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = - new EntriesIterator(tree, start, end) - - def keysIterator[sealed A: 
Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = - new KeysIterator(tree, start, end) - - def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = - new ValuesIterator(tree, start, end) - - private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) - (implicit ord: Ordering[A]) extends AbstractIterator[R] { - - protected def nextResult(node: Node[A, B]): R - - def hasNext: Boolean = nextNode ne null - - @throws[NoSuchElementException] - def next(): R = nextNode match { - case null => throw new NoSuchElementException("next on empty iterator") - case node => - nextNode = successor(node) - setNullIfAfterEnd() - nextResult(node) - } - - private[this] var nextNode: Node[A, B] = start match { - case None => minNode(tree.root) - case Some(from) => minNodeAfter(tree.root, from) - } - - private[this] def setNullIfAfterEnd(): Unit = - if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) - nextNode = null - - setNullIfAfterEnd() - } - - private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, (A, B)](tree, start, end) { - - def nextResult(node: Node[A, B]) = (node.key, node.value) - } - - private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, A](tree, start, end) { - - def nextResult(node: Node[A, B]) = node.key - } - - private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) - extends TreeIterator[A, B, B](tree, start, end) { - - def nextResult(node: Node[A, B]) = node.value - } - - // ---- debugging ---- - - /** - * Checks if the tree is in a valid state. 
That happens if: - * - It is a valid binary search tree; - * - All red-black properties are satisfied; - * - All non-null nodes have their `parent` reference correct; - * - The size variable in `tree` corresponds to the actual size of the tree. - */ - def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = - isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size - - /** - * Returns true if all non-null nodes have their `parent` reference correct. - */ - private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { - - def hasProperParentRefs(node: Node[A, B]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (node.left.parent ne node) || - (node.right ne null) && (node.right.parent ne node)) false - else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) - } - } - - if(tree.root eq null) true - else (tree.root.parent eq null) && hasProperParentRefs(tree.root) - } - - /** - * Returns true if this node follows the properties of a binary search tree. - */ - private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { - if (node eq null) true - else { - if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || - (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false - else isValidBST(node.left) && isValidBST(node.right) - } - } - - /** - * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red - * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
- */ - private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { - - def noRedAfterRed(node: Node[A, B]): Boolean = { - if (node eq null) true - else if (node.red && (isRed(node.left) || isRed(node.right))) false - else noRedAfterRed(node.left) && noRedAfterRed(node.right) - } - - def blackHeight(node: Node[A, B]): Int = { - if (node eq null) 1 - else { - val lh = blackHeight(node.left) - val rh = blackHeight(node.right) - - if (lh == -1 || lh != rh) -1 - else if (isRed(node)) lh - else lh + 1 - } - } - - isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 - } - - // building - - /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Node[A, Null] = size match { - case 0 => null - case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val x = xs.next() - val right = f(level+1, size-1-leftSize) - val n = new Node(x, null, false, left, right, null) - if(left ne null) left.parent = n - right.parent = n - n - } - new Tree(f(1, size), size) - } - - /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { - val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes - def f(level: Int, size: Int): Node[A, B] = size match { - case 0 => null - case 1 => - val (k, v) = xs.next() - new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) - case n => - val leftSize = (size-1)/2 - val left = f(level+1, leftSize) - val (k, v) = xs.next() - val right = f(level+1, size-1-leftSize) - val n = new Node(k, v, false, left, right, null) - 
if(left ne null) left.parent = n - right.parent = n - n - } - new Tree(f(1, size), size) - } - - def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] = - if(n eq null) null else { - val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) - if(c.left != null) c.left.parent = c - if(c.right != null) c.right.parent = c - c - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala deleted file mode 100644 index 246e525e37d9..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import language.experimental.captureChecking - -/** `ReusableBuilder` is a marker trait that indicates that a `Builder` - * can be reused to build more than one instance of a collection. In - * particular, calling `result()` followed by `clear()` will produce a - * collection and reset the builder to begin building a new collection - * of the same type. - * - * In general no method other than `clear()` may be called after `result()`. - * It is up to subclasses to implement and to document other allowed sequences - * of operations (e.g. calling other methods after `result()` in order to obtain - * different snapshots of a collection under construction). - * - * @tparam Elem the type of elements that get added to the builder. - * @tparam To the type of collection that it produced. - * - * @define multipleResults - * - * This Builder can be reused after calling `result()` without an - * intermediate call to `clear()` in order to build multiple related results. 
- */ -trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { - /** Clears the contents of this builder. - * After execution of this method, the builder will contain no elements. - * - * If executed immediately after a call to `result()`, this allows a new - * instance of the same type of collection to be built. - */ - override def clear(): Unit // Note: overriding for Scaladoc only! - - /** Produces a collection from the added elements. - * - * After a call to `result`, the behavior of all other methods is undefined - * save for `clear()`. If `clear()` is called, then the builder is reset and - * may be used to build another instance. - * - * @return a collection containing the elements added to this builder. - */ - override def result(): To // Note: overriding for Scaladoc only! -} diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala deleted file mode 100644 index 01384e993e89..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Set.scala +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.collection.mutable - -import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} -import language.experimental.captureChecking - -/** Base trait for mutable sets */ -trait Set[A] - extends Iterable[A] - with collection.Set[A] - with SetOps[A, Set, Set[A]] - with IterableFactoryDefaults[A, Set] { - - override def iterableFactory: IterableFactory[Set] = Set -} - -/** - * @define coll mutable set - * @define Coll `mutable.Set` - */ -trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] - extends collection.SetOps[A, CC, C] - with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below - with Cloneable[C] - with Builder[A, C] - with Growable[A] - with Shrinkable[A] { - - def result(): C = coll - - /** Check whether the set contains the given element, and add it if not. - * - * @param elem the element to be added - * @return true if the element was added - */ - def add(elem: A): Boolean = - !contains(elem) && { - coll += elem; true - } - - /** Updates the presence of a single element in this set. - * - * This method allows one to add or remove an element `elem` - * from this set depending on the value of parameter `included`. - * Typically, one would use the following syntax: - * {{{ - * set(elem) = true // adds element - * set(elem) = false // removes element - * }}} - * - * @param elem the element to be added or removed - * @param included a flag indicating whether element should be included or excluded. - */ - def update(elem: A, included: Boolean): Unit = { - if (included) add(elem) - else remove(elem) - } - - /** Removes an element from this set. 
- * - * @param elem the element to be removed - * @return true if this set contained the element before it was removed - */ - def remove(elem: A): Boolean = { - val res = contains(elem) - coll -= elem - res - } - - def diff(that: collection.Set[A]): C = - foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) - - @deprecated("Use filterInPlace instead", "2.13.0") - @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) - - /** Removes all elements from the set for which do not satisfy a predicate. - * @param p the predicate used to test elements. Only elements for - * which `p` returns `true` are retained in the set; all others - * are removed. - */ - def filterInPlace(p: A => Boolean): this.type = { - if (nonEmpty) { - val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val elem = array(i).asInstanceOf[A] - if (!p(elem)) { - this -= elem - } - i += 1 - } - } - this - } - - override def clone(): C = empty ++= this - - override def knownSize: Int = super[IterableOps].knownSize -} - -/** - * $factoryInfo - * @define coll mutable set - * @define Coll `mutable.Set` - */ -@SerialVersionUID(3L) -object Set extends IterableFactory.Delegate[Set](HashSet) - - -/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ -abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala deleted file mode 100644 index 8017177f5720..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} -import language.experimental.captureChecking - -/** - * Base type for mutable sorted map collections - */ -trait SortedMap[K, V] - extends collection.SortedMap[K, V] - with Map[K, V] - with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] - with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { - - override def unsorted: Map[K, V] = this - - override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap - - /** The same sorted map with a given default function. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefault`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. - * - * @param d the function mapping keys to values, used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) - - /** The same map with a given default value. - * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. - * are not affected by `withDefaultValue`. - * - * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
- * - * @param d default value used for non-present keys - * @return a wrapper of the map with a default value - */ - override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) -} - -trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] - extends collection.SortedMapOps[K, V, CC, C] - with MapOps[K, V, Map, C] { - - def unsorted: Map[K, V] - - @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = - clone().asInstanceOf[CC[K, V1]].addOne((key, value)) -} - -@SerialVersionUID(3L) -object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - - @SerialVersionUID(3L) - final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) - extends Map.WithDefault[K, V](underlying, defaultValue) - with SortedMap[K, V] - with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] - with Serializable { - - override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory - - def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) - - def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) - - implicit def ordering: Ordering[K] = underlying.ordering - - def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = - new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) - - // Need to override following methods to match type signatures of `SortedMap.WithDefault` - // for operations preserving default value - override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } - - override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - - override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - - override def concat[V2 >: V](suffix: 
collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = - underlying.concat(suffix).withDefault(defaultValue) - - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = - new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) - - override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = - SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala deleted file mode 100644 index e657fb749d7d..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package collection -package mutable -import language.experimental.captureChecking - -/** - * Base type for mutable sorted set collections - */ -trait SortedSet[A] - extends Set[A] - with collection.SortedSet[A] - with SortedSetOps[A, SortedSet, SortedSet[A]] - with SortedSetFactoryDefaults[A, SortedSet, Set] { - - override def unsorted: Set[A] = this - - override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet -} - -/** - * @define coll mutable sorted set - * @define Coll `mutable.Sortedset` - */ -trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] - extends SetOps[A, Set, C] - with collection.SortedSetOps[A, CC, C] { - - def unsorted: Set[A] -} - -/** - * $factoryInfo - * @define coll mutable sorted set - * @define Coll `mutable.Sortedset` - */ -@SerialVersionUID(3L) -object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala deleted file mode 100644 index 4efa9621f374..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/Stack.scala +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection.mutable - -import scala.annotation.{migration, nowarn} -import scala.collection.generic.DefaultSerializable -import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} - -import language.experimental.captureChecking - -/** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. 
- * - * Note that operations which consume and produce iterables preserve order, - * rather than reversing it (as would be expected from building a new stack - * by pushing an element at a time). - * - * @tparam A type of the elements contained in this stack. - * - * @define Coll `Stack` - * @define coll stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") -class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) - extends ArrayDeque[A](array, start, end) - with IndexedSeqOps[A, Stack, Stack[A]] - with StrictOptimizedSeqOps[A, Stack, Stack[A]] - with IterableFactoryDefaults[A, Stack] - with ArrayDequeOps[A, Stack, Stack[A]] - with Cloneable[Stack[A]] - with DefaultSerializable { - - def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = - this(ArrayDeque.alloc(initialSize), start = 0, end = 0) - - override def iterableFactory: SeqFactory[Stack] = Stack - - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "Stack" - - /** - * Add elements to the top of this stack - * - * @param elem - * @return - */ - def push(elem: A): this.type = prepend(elem) - - /** Push two or more elements onto the stack. The last element - * of the sequence will be on top of the new stack. - * - * @param elems the element sequence. - * @return the stack with the new elements on top. - */ - def push(elem1: A, elem2: A, elems: A*): this.type = { - val k = elems.knownSize - ensureSize(length + (if(k >= 0) k + 2 else 3)) - prepend(elem1).prepend(elem2).pushAll(elems) - } - - /** Push all elements in the given iterable object onto the stack. The - * last element in the iterable object will be on top of the new stack. - * - * @param elems the iterable object. - * @return the stack with the new elements on top. 
- */ - def pushAll(elems: scala.collection.IterableOnce[A]): this.type = - prependAll(elems match { - case it: scala.collection.Seq[A] => it.view.reverse - case it => IndexedSeq.from(it).view.reverse - }) - - /** - * Removes the top element from this stack and return it - * - * @return - * @throws NoSuchElementException when stack is empty - */ - def pop(): A = removeHead() - - /** - * Pop all elements from this stack and return it - * - * @return The removed elements - */ - def popAll(): scala.collection.Seq[A] = removeAll() - - /** - * Returns and removes all elements from the top of this stack which satisfy the given predicate - * - * @param f the predicate used for choosing elements - * @return The removed elements - */ - def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) - - /** Returns the top element of the stack. This method will not remove - * the element from the stack. An error is signaled if there is no - * element on the stack. - * - * @throws NoSuchElementException - * @return the top element - */ - @`inline` final def top: A = head - - override protected def klone(): Stack[A] = { - val bf = newSpecificBuilder - bf ++= this - bf.result() - } - - override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] = - new Stack(array, start = 0, end) - -} - -/** - * $factoryInfo - * @define coll stack - * @define Coll `Stack` - */ -@SerialVersionUID(3L) -object Stack extends StrictOptimizedSeqFactory[Stack] { - - def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source - - def empty[sealed A]: Stack[A] = new Stack - - def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index 5320fa1dabb0..c7859214821d 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ 
b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala @@ -110,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr override def toString: String = result() - override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = ct.runtimeClass match { case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] case _ => super.toArray diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala deleted file mode 100644 index f714a9ed46c2..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala +++ /dev/null @@ -1,258 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{RedBlackTree => RB} -import language.experimental.captureChecking - -/** - * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam K the type of the keys contained in this tree map. - * @tparam V the type of the values associated with the keys. 
- * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) - extends AbstractMap[K, V] - with SortedMap[K, V] - with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] - with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] - with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] - with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] - with DefaultSerializable { - - override def sortedMapFactory = TreeMap - - /** - * Creates an empty `TreeMap`. - * @param ord the implicit ordering used to compare objects of type `K`. - * @return an empty `TreeMap`. - */ - def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) - - def iterator: Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree) - } - - override def keysIterator: Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, None) - } - - override def valuesIterator: Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, None) - } - - def keysIteratorFrom(start: K): Iterator[K] = { - if (isEmpty) Iterator.empty - else RB.keysIterator(tree, Some(start)) - } - - def iteratorFrom(start: K): Iterator[(K, V)] = { - if (isEmpty) Iterator.empty - else RB.iterator(tree, Some(start)) - } - - override def valuesIteratorFrom(start: K): Iterator[V] = { - if (isEmpty) Iterator.empty - else RB.valuesIterator(tree, Some(start)) - } - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = - shape.parUnbox( - scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( - size, tree.root, _.left, _.right, x => (x.key, x.value) - ) - ) - - override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, 
V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[K, V] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) - } - s.asInstanceOf[S with EfficientSplit] - } - - def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } - - def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } - - override def clear(): Unit = RB.clear(tree) - - def get(key: K): Option[V] = RB.get(tree, key) - - /** - * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and - * vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. 
That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) - - override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) - override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) - - override def size: Int = RB.size(tree) - override def knownSize: Int = size - override def isEmpty: Boolean = RB.isEmpty(tree) - - override def contains(key: K): Boolean = RB.contains(tree, key) - - override def head: (K, V) = RB.min(tree).get - - override def last: (K, V) = RB.max(tree).get - - override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) - - override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) - - override protected[this] def className: String = "TreeMap" - - - /** - * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. - * - * Only entries with keys between this projection's key range will ever appear as elements of this map, independently - * of whether the entries are added through the original map or through this view. That means that if one inserts a - * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the - * newly added entry. Mutations are always reflected in the original map, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. 
- * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). - */ - private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: K): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = - new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) - - override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None - - override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) - override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) - override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) - override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) - override def iteratorFrom(start: K) = if 
(RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) - override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) - override def size = if (RB.size(tree) == 0) 0 else iterator.length - override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 - override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext - override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def head = headOption.get - override def headOption = { - val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) - (entry, until) match { - case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None - case _ => entry - } - } - - override def last = lastOption.get - override def lastOption = { - val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) - (entry, from) match { - case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None - case _ => entry - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized - // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
- override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) - - override def clone() = super.clone().rangeImpl(from, until) - } - -} - -/** - * $factoryInfo - * - * @define Coll mutable.TreeMap - * @define coll mutable tree map - */ -@SerialVersionUID(3L) -object TreeMap extends SortedMapFactory[TreeMap] { - - def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = - Growable.from(empty[K, V], it) - - def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]() - - def newBuilder[sealed K: Ordering, sealed V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) - -} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala deleted file mode 100644 index 9ba439bea041..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection.mutable - -import scala.collection.Stepper.EfficientSplit -import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.{RedBlackTree => RB} -import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} -import language.experimental.captureChecking - -/** - * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. - * - * @param ordering the implicit ordering used to compare objects of type `A`. - * @tparam A the type of the keys contained in this tree set. 
- * - * @define Coll mutable.TreeSet - * @define coll mutable tree set - */ -// Original API designed in part by Lucien Pereira -sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) - extends AbstractSet[A] - with SortedSet[A] - with SortedSetOps[A, TreeSet, TreeSet[A]] - with StrictOptimizedIterableOps[A, Set, TreeSet[A]] - with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] - with SortedSetFactoryDefaults[A, TreeSet, Set] - with DefaultSerializable { - - if (ordering eq null) - throw new NullPointerException("ordering must not be null") - - /** - * Creates an empty `TreeSet`. - * @param ord the implicit ordering used to compare objects of type `A`. - * @return an empty `TreeSet`. - */ - def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) - - override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet - - def iterator: collection.Iterator[A] = RB.keysIterator(tree) - - def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) - - override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { - import scala.collection.convert.impl._ - type T = RB.Node[A, Null] - val s = shape.shape match { - case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) - case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) - case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) - case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) - } - s.asInstanceOf[S with EfficientSplit] - } - - def addOne(elem: A): this.type = { - RB.insert(tree, elem, null) - this - } - - def subtractOne(elem: A): this.type = { - RB.delete(tree, elem) - this - } - - def clear(): Unit = RB.clear(tree) - - def 
contains(elem: A): Boolean = RB.contains(tree, elem) - - def unconstrained: collection.Set[A] = this - - def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) - - override protected[this] def className: String = "TreeSet" - - override def size: Int = RB.size(tree) - override def knownSize: Int = size - override def isEmpty: Boolean = RB.isEmpty(tree) - - override def head: A = RB.minKey(tree).get - - override def last: A = RB.maxKey(tree).get - - override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) - - override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) - - override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) - - - /** - * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. - * - * Only keys between this projection's key range will ever appear as elements of this set, independently of whether - * the elements are added through the original set or through this view. That means that if one inserts an element in - * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. - * Mutations are always reflected in the original set, though. - * - * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower - * bound. - * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper - * bound. - */ - private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { - - /** - * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). 
- */ - private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { - case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) - case (None, _) => newFrom - case _ => from - } - - /** - * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). - */ - private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { - case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) - case (None, _) => newUntil - case _ => until - } - - /** - * Returns true if the argument is inside the view bounds (between `from` and `until`). - */ - private[this] def isInsideViewBounds(key: A): Boolean = { - val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 - val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 - afterFrom && beforeUntil - } - - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = - new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) - - override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) - - override def iterator = RB.keysIterator(tree, from, until) - override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) - - override def size = if (RB.size(tree) == 0) 0 else iterator.length - override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 - override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext - - override def head: A = headOption.get - override def headOption: Option[A] = { - val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) - (elem, until) match { - case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None - case _ => elem - } - } - - override def last: A = lastOption.get - override def lastOption = { - val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) - (elem, from) match { - case (Some(e), Some(fr)) if ordering.compare(e, fr) 
< 0 => None - case _ => elem - } - } - - // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized - // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See - // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. - override def foreach[U](f: A => U): Unit = iterator.foreach(f) - - override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) - - } - -} - -/** - * $factoryInfo - * @define Coll `mutable.TreeSet` - * @define coll mutable tree set - */ -@SerialVersionUID(3L) -object TreeSet extends SortedIterableFactory[TreeSet] { - - def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]() - - def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = - it match { - case ts: TreeSet[E] if ordering == ts.ordering => - new TreeSet[E](ts.tree.treeCopy()) - case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => - new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) - case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => - val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator - new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) - case _ => - val t: RB.Tree[E, Null] = RB.Tree.empty - val i = it.iterator - while (i.hasNext) RB.insert(t, i.next(), null) - new TreeSet[E](t) - } - - def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { - private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty - def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } - def result(): TreeSet[A] = new TreeSet[A](tree) - def clear(): Unit = { tree = RB.Tree.empty } - } -} diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala deleted file mode 100644 index 
2015b76a31b8..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala +++ /dev/null @@ -1,443 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -package mutable - -import scala.annotation.tailrec -import scala.collection.generic.DefaultSerializable -import scala.reflect.ClassTag -import scala.collection.immutable.Nil -import language.experimental.captureChecking - -/** A buffer that stores elements in an unrolled linked list. - * - * Unrolled linked lists store elements in linked fixed size - * arrays. - * - * Unrolled buffers retain locality and low memory overhead - * properties of array buffers, but offer much more efficient - * element addition, since they never reallocate and copy the - * internal array. - * - * However, they provide `O(n/m)` complexity random access, - * where `n` is the number of elements, and `m` the size of - * internal array chunks. - * - * Ideal to use when: - * - elements are added to the buffer and then all of the - * elements are traversed sequentially - * - two unrolled buffers need to be concatenated (see `concat`) - * - * Better than singly linked lists for random access, but - * should still be avoided for such a purpose. 
- * - * @define coll unrolled buffer - * @define Coll `UnrolledBuffer` - * - */ -@SerialVersionUID(3L) -sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T]) - extends AbstractBuffer[T] - with Buffer[T] - with Seq[T] - with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] - with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] - with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] - with Builder[T, UnrolledBuffer[T]] - with DefaultSerializable { - - import UnrolledBuffer.Unrolled - - @transient private var headptr = newUnrolled - @transient private var lastptr = headptr - @transient private var sz = 0 - - private[collection] def headPtr = headptr - private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head - private[collection] def lastPtr = lastptr - private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last - private[collection] def size_=(s: Int) = sz = s - - protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer - protected def iterableEvidence: ClassTag[T] = tag - - override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged - - protected def newUnrolled = new Unrolled[T](this) - - // The below would allow more flexible behavior without requiring inheritance - // that is risky because all the important internals are private. - // private var myLengthPolicy: Int => Int = x => x - // - // /** Specifies how the array lengths should vary. - // * - // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length - // * policy can be given that changes this scheme to, for instance, an - // * exponential growth. 
- // * - // * @param nextLength computes the length of the next array from the length of the latest one - // */ - // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } - private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) - - def classTagCompanion = UnrolledBuffer - - /** Concatenates the target unrolled buffer to this unrolled buffer. - * - * The specified buffer `that` is cleared after this operation. This is - * an O(1) operation. - * - * @param that the unrolled buffer whose elements are added to this buffer - */ - def concat(that: UnrolledBuffer[T]) = { - // bind the two together - if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr - - // update size - sz += that.sz - - // `that` is no longer usable, so clear it - // here we rely on the fact that `clear` allocates - // new nodes instead of modifying the previous ones - that.clear() - - // return a reference to this - this - } - - def addOne(elem: T) = { - lastptr = lastptr.append(elem) - sz += 1 - this - } - - def clear(): Unit = { - headptr = newUnrolled - lastptr = headptr - sz = 0 - } - - def iterator: Iterator[T] = new AbstractIterator[T] { - var pos: Int = -1 - var node: Unrolled[T] = headptr - scan() - - private def scan(): Unit = { - pos += 1 - while (pos >= node.size) { - pos = 0 - node = node.next - if (node eq null) return - } - } - def hasNext = node ne null - def next() = if (hasNext) { - val r = node.array(pos) - scan() - r - } else Iterator.empty.next() - } - - // this should be faster than the iterator - override def foreach[U](f: T => U) = headptr.foreach(f) - - def result() = this - - def length = sz - - override def knownSize: Int = sz - - def apply(idx: Int) = - if (idx >= 0 && idx < sz) headptr(idx) - else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - def update(idx: Int, newelem: T) = - if (idx >= 0 && idx < sz) headptr(idx) = newelem - else throw new IndexOutOfBoundsException(s"$idx is 
out of bounds (min 0, max ${sz-1})") - - def mapInPlace(f: T => T): this.type = { - headptr.mapInPlace(f) - this - } - - def remove(idx: Int) = - if (idx >= 0 && idx < sz) { - sz -= 1 - headptr.remove(idx, this) - } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - @tailrec final def remove(idx: Int, count: Int): Unit = - if (count > 0) { - remove(idx) - remove(idx, count-1) - } - - def prepend(elem: T) = { - headptr = headptr prepend elem - sz += 1 - this - } - - def insert(idx: Int, elem: T): Unit = - insertAll(idx, elem :: Nil) - - def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = - if (idx >= 0 && idx <= sz) { - sz += headptr.insertAll(idx, elems, this) - } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") - - override def subtractOne(elem: T): this.type = { - if (headptr.subtractOne(elem, this)) { - sz -= 1 - } - this - } - - def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { - remove(from, replaced) - insertAll(from, patch) - this - } - - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject - out writeInt sz - for (elem <- this) out writeObject elem - } - - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject - - val num = in.readInt - - headPtr = newUnrolled - lastPtr = headPtr - sz = 0 - var i = 0 - while (i < num) { - this += in.readObject.asInstanceOf[T] - i += 1 - } - } - - override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this - - override protected[this] def className = "UnrolledBuffer" -} - - -@SerialVersionUID(3L) -object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => - - val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) - - def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - - def from[sealed A : ClassTag](source: 
scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = newBuilder[A].addAll(source) - - def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - - final val waterline: Int = 50 - - final def waterlineDenom: Int = 100 - - @deprecated("Use waterlineDenom instead.", "2.13.0") - final val waterlineDelim: Int = waterlineDenom - - private[collection] val unrolledlength = 32 - - /** Unrolled buffer node. - */ - class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { - private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) - private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) - - private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) - - // adds and returns itself or the new unrolled if full - @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { - array(size) = elem - size += 1 - this - } else { - next = new Unrolled[T](0, new Array[T](nextlength), null, buff) - next append elem - } - def foreach[U](f: T => U): Unit = { - var unrolled = this - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - f(elem) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - } - def mapInPlace(f: T => T): Unit = { - var unrolled = this - var i = 0 - while (unrolled ne null) { - val chunkarr = unrolled.array - val chunksz = unrolled.size - while (i < chunksz) { - val elem = chunkarr(i) - chunkarr(i) = f(elem) - i += 1 - } - i = 0 - unrolled = unrolled.next - } - } - @tailrec final def apply(idx: Int): T = - if (idx < size) array(idx) else next.apply(idx - size) - @tailrec final def update(idx: Int, newelem: T): Unit = - if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) - @tailrec final def locate(idx: Int): 
Unrolled[T] = - if (idx < size) this else next.locate(idx - size) - def prepend(elem: T) = if (size < array.length) { - // shift the elements of the array right - // then insert the element - shiftright() - array(0) = elem - size += 1 - this - } else { - // allocate a new node and store element - // then make it point to this - val newhead = new Unrolled[T](buff) - newhead append elem - newhead.next = this - newhead - } - // shifts right assuming enough space - private def shiftright(): Unit = { - var i = size - 1 - while (i >= 0) { - array(i + 1) = array(i) - i -= 1 - } - } - // returns pointer to new last if changed - @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = - if (idx < size) { - // remove the element - // then try to merge with the next bucket - val r = array(idx) - shiftleft(idx) - size -= 1 - if (tryMergeWithNext()) buffer.lastPtr = this - r - } else next.remove(idx - size, buffer) - - @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { - var i = 0 - while (i < size) { - if(array(i) == elem) { - remove(i, buffer) - return true - } - i += 1 - } - if(next ne null) next.subtractOne(elem, buffer) else false - } - - // shifts left elements after `leftb` (overwrites `leftb`) - private def shiftleft(leftb: Int): Unit = { - var i = leftb - while (i < (size - 1)) { - array(i) = array(i + 1) - i += 1 - } - nullout(i, i + 1) - } - protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { - // copy the next array, then discard the next node - Array.copy(next.array, 0, array, size, next.size) - size = size + next.size - next = next.next - if (next eq null) true else false // checks if last node was thrown out - } else false - - @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { - if (idx < size) { - // divide this node at the appropriate position and insert all into head - // update new next - 
val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff) - Array.copy(array, idx, newnextnode.array, 0, size - idx) - newnextnode.size = size - idx - newnextnode.next = next - - // update this - nullout(idx, size) - size = idx - next = null - - // insert everything from iterable to this - var curr = this - var appended = 0 - for (elem <- t.iterator) { - curr = curr append elem - appended += 1 - } - curr.next = newnextnode - - // try to merge the last node of this with the newnextnode and fix tail pointer if needed - if (curr.tryMergeWithNext()) buffer.lastPtr = curr - else if (newnextnode.next eq null) buffer.lastPtr = newnextnode - appended - } - else if (idx == size || (next eq null)) { - var curr = this - var appended = 0 - for (elem <- t.iterator) { - curr = curr append elem - appended += 1 - } - appended - } - else next.insertAll(idx - size, t, buffer) - } - - private def nullout(from: Int, until: Int): Unit = { - var idx = from - while (idx < until) { - array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! 
- idx += 1 - } - } - - // assumes this is the last node - // `thathead` and `thatlast` are head and last node - // of the other unrolled list, respectively - def bind(thathead: Unrolled[T]) = { - assert(next eq null) - next = thathead - tryMergeWithNext() - } - - override def toString: String = - array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") - } -} - -// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: -// Todo -- revisit whether inheritance is the best way to achieve this functionality -private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { - override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz - override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) -} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala deleted file mode 100644 index a9498b7fc69b..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package mutable - -import scala.annotation.nowarn -import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} -import language.experimental.captureChecking - -/** A hash map with references to entries which are weakly reachable. Entries are - * removed from this map when the key is no longer (strongly) referenced. This class wraps - * `java.util.WeakHashMap`. 
- * - * @tparam K type of keys contained in this map - * @tparam V type of values associated with the keys - * - * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] - * section on `Weak Hash Maps` for more information. - * - * @define Coll `WeakHashMap` - * @define coll weak hash map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ -@SerialVersionUID(3L) -class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap) - with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] - with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { - override def empty = new WeakHashMap[K, V] - override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap - @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") - override protected[this] def stringPrefix = "WeakHashMap" -} - -/** $factoryInfo - * @define Coll `WeakHashMap` - * @define coll weak hash map - */ -@SerialVersionUID(3L) -object WeakHashMap extends MapFactory[WeakHashMap] { - def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V] - def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) -} - diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala deleted file mode 100644 index d658ca5bc65a..000000000000 --- a/tests/pos-special/stdlib/collection/mutable/package.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). 
- * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.collection -import language.experimental.captureChecking - - -package object mutable { - @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") - type WrappedArray[X] = ArraySeq[X] - @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") - val WrappedArray = ArraySeq - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") - type ArrayStack[X] = Stack[X] - @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") - val ArrayStack = Stack - - @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") - type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] - - @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") - type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] - - @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") - type IndexedOptimizedSeq[A] = IndexedSeq[A] - - @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") - type IndexedOptimizedBuffer[A] = IndexedBuffer[A] -} diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala deleted file mode 100644 index ad4686be1fb2..000000000000 --- a/tests/pos-special/stdlib/collection/package.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. 
- * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -import language.experimental.captureChecking - -package object collection { - @deprecated("Use Iterable instead of Traversable", "2.13.0") - type Traversable[+X] = Iterable[X] - @deprecated("Use Iterable instead of Traversable", "2.13.0") - val Traversable = Iterable - @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") - type TraversableOnce[+X] = IterableOnce[X] - @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") - val TraversableOnce = IterableOnce - @deprecated("Use SeqOps instead of SeqLike", "2.13.0") - type SeqLike[A, T] = SeqOps[A, Seq, T] - @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") - type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] - - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenTraversableOnce[+X] = IterableOnce[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenTraversableOnce = IterableOnce - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenTraversable[+X] = Iterable[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenTraversable = Iterable - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenIterable[+X] = Iterable[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenIterable = Iterable - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenSeq[+X] = Seq[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenSeq = Seq - @deprecated("Gen* collection types have been removed", "2.13.0") - type GenSet[X] = Set[X] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenSet = Set - @deprecated("Gen* 
collection types have been removed", "2.13.0") - type GenMap[K, +V] = Map[K, V] - @deprecated("Gen* collection types have been removed", "2.13.0") - val GenMap = Map - - /** Needed to circumvent a difficulty between dotty and scalac concerning - * the right top type for a type parameter of kind * -> *. - * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. - * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. - */ - private[scala] type AnyConstr[X] = Any - - /** An extractor used to head/tail deconstruct sequences. */ - object +: { - /** Splits a sequence into head +: tail. - * @return Some((head, tail)) if sequence is non-empty. None otherwise. - */ - def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = - if(t.isEmpty) None - else Some(t.head -> t.tail) - } - - /** An extractor used to init/last deconstruct sequences. */ - object :+ { - /** Splits a sequence into init :+ last. - * @return Some((init, last)) if sequence is non-empty. None otherwise. 
- */ - def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = - if(t.isEmpty) None - else Some(t.init -> t.last) - } -} From fbb7d7114898179541b3ef6277d6677dd26523ee Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 15:01:58 +0100 Subject: [PATCH 02/19] Rename annotation source file --- .../unchecked/uncheckedCapabilityLeaks.scala | 12 ------------ .../annotation/unchecked/uncheckedCaptures.scala | 12 ++++++++++++ 2 files changed, 12 insertions(+), 12 deletions(-) create mode 100644 library/src/scala/annotation/unchecked/uncheckedCaptures.scala diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala index 477ac6d742f7..e69de29bb2d1 100644 --- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala +++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala @@ -1,12 +0,0 @@ -package scala.annotation -package unchecked - -/** An annotation for mutable variables that are allowed to capture - * the root capability `cap`. Allowing this is not capture safe since - * it can cause leakage of capabilities from local scopes by assigning - * values retaining such capabilties to the annotated variable in - * an outer scope. - */ -class uncheckedCaptures extends StaticAnnotation - - diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala new file mode 100644 index 000000000000..477ac6d742f7 --- /dev/null +++ b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala @@ -0,0 +1,12 @@ +package scala.annotation +package unchecked + +/** An annotation for mutable variables that are allowed to capture + * the root capability `cap`. 
Allowing this is not capture safe since + * it can cause leakage of capabilities from local scopes by assigning + * values retaining such capabilties to the annotated variable in + * an outer scope. + */ +class uncheckedCaptures extends StaticAnnotation + + From d85db26ab0c49ad8f67f5628bcba1c970242b569 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 18:30:51 +0100 Subject: [PATCH 03/19] Also count @Sealed annotated abstract types as sealed Also count abstract types that have a @Sealed annotation on their bound as sealed. That way, we get free propagation into synthesized type parameters. We should probably unify this scheme and `sealed` modifiers. --- compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index fab0689b4df2..c607e85e661d 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -163,7 +163,8 @@ object CheckCaptures: capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") t.info match case TypeBounds(_, hi) - if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) => + if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot) + && !t.symbol.isParametricIn(carrier) => if hi.isAny then report.error( em"""$what cannot $have $tp since From b5fe6d2400e1240127f0692ca21152165d8d29ba Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 30 Oct 2023 21:26:57 +0100 Subject: [PATCH 04/19] Coarse restriction to disallow local roots in external types This needs to be refined further for class members, similar to how we check that private types cannot escape from a class API. 
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 16 ++++++++++++++ tests/neg-custom-args/captures/filevar.scala | 2 +- .../neg-custom-args/captures/localcaps.check | 12 ++++++++++ .../neg-custom-args/captures/localcaps.scala | 2 +- tests/neg-custom-args/captures/pairs.check | 8 +++++++ tests/neg-custom-args/captures/pairs.scala | 4 ++-- .../recursive-leaking-local-cap.scala | 22 +++++++++++++++++++ .../captures/sealed-classes.scala | 21 ++++++++++++++++++ 8 files changed, 83 insertions(+), 4 deletions(-) create mode 100644 tests/neg-custom-args/captures/localcaps.check create mode 100644 tests/neg-custom-args/captures/recursive-leaking-local-cap.scala create mode 100644 tests/neg-custom-args/captures/sealed-classes.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index c607e85e661d..b29809a69427 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1299,6 +1299,20 @@ class CheckCaptures extends Recheck, SymTransformer: checker.traverse(tree.knownType) end healTypeParam + def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit = + val check = new TypeTraverser: + def traverse(tp: Type) = tp match + case tp: TermRef if tp.isLocalRootCapability => + if tp.localRootOwner == sym then + report.error(i"local root $tp cannot appear in type of $sym", pos) + case tp: ClassInfo => + traverseChildren(tp) + for mbr <- tp.decls do + if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos) + case _ => + traverseChildren(tp) + check.traverse(info) + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. 
@@ -1322,6 +1336,8 @@ class CheckCaptures extends Recheck, SymTransformer: checkBounds(normArgs, tl) args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) case _ => + case _: ValOrDefDef | _: TypeDef => + checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) case _ => end check end checker diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala index c8280e2ff3b7..34588617c0b8 100644 --- a/tests/neg-custom-args/captures/filevar.scala +++ b/tests/neg-custom-args/captures/filevar.scala @@ -5,7 +5,7 @@ class File: def write(x: String): Unit = ??? class Service: - var file: File^{cap[Service]} = uninitialized + var file: File^{cap[Service]} = uninitialized // error def log = file.write("log") def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T = diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check new file mode 100644 index 000000000000..b09702749d10 --- /dev/null +++ b/tests/neg-custom-args/captures/localcaps.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ---------------------------------------------------------- +4 | def x: C^{cap[d]} = ??? // error + | ^^^^^^ + | `d` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ---------------------------------------------------------- +9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error + | ^^^^^^^ + | `z2` does not name an outer definition that represents a capture level +-- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 ----------------------------------------------------------- +6 | def y: C^{cap[C]} = ??? 
// error + | ^ + | local root (cap[C] : caps.Cap) cannot appear in type of class C diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala index f5227bfef96b..049a1ee0d775 100644 --- a/tests/neg-custom-args/captures/localcaps.scala +++ b/tests/neg-custom-args/captures/localcaps.scala @@ -3,7 +3,7 @@ class C: def x: C^{cap[d]} = ??? // error - def y: C^{cap[C]} = ??? // ok + def y: C^{cap[C]} = ??? // error private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check index 38712469879f..9d1b3a76e164 100644 --- a/tests/neg-custom-args/captures/pairs.check +++ b/tests/neg-custom-args/captures/pairs.check @@ -12,3 +12,11 @@ | Required: Cap^ ->{d} Unit | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/pairs.scala:6:8 --------------------------------------------------------------- +6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair +-- Error: tests/neg-custom-args/captures/pairs.scala:7:8 --------------------------------------------------------------- +7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error + | ^ + | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala index 4fc495d60f95..99b27639f729 100644 --- a/tests/neg-custom-args/captures/pairs.scala +++ b/tests/neg-custom-args/captures/pairs.scala @@ -3,8 +3,8 @@ object Monomorphic2: class Pair(x: Cap => Unit, y: Cap => Unit): - def fst: Cap^{cap[Pair]} ->{x} Unit = x - def snd: Cap^{cap[Pair]} ->{y} Unit = y + def fst: Cap^{cap[Pair]} ->{x} Unit = x // error + def snd: Cap^{cap[Pair]} ->{y} Unit = y // error def test(c: Cap, d: Cap) 
= def f(x: Cap): Unit = if c == x then () diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala new file mode 100644 index 000000000000..0daecafbf9d0 --- /dev/null +++ b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala @@ -0,0 +1,22 @@ +import language.experimental.captureChecking +trait Cap: + def use: Int = 42 + +def usingCap[sealed T](op: Cap^ => T): T = ??? + +def badTest(): Unit = + def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error + if b then c + else + val leaked = usingCap[Cap^{cap[bad]}](bad(true)) + leaked.use // boom + c + + usingCap[Unit]: c0 => + bad(false)(c0) + +class Bad: + def foo: Cap^{cap[Bad]} = ??? // error + private def bar: Cap^{cap[Bad]} = ??? // ok + + diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala new file mode 100644 index 000000000000..b8cb0acbf5c5 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-classes.scala @@ -0,0 +1,21 @@ +abstract class C1[A1]: + def set(x: A1): Unit + def get: A1 + +trait Co[+A]: + def get: A + +class C2[sealed A2] extends C1[A2], Co[A2]: // ok + private var x: A2 = ??? 
+ def set(x: A2): Unit = + this.x = x + def get: A2 = x + +class C3[A3] extends C2[A3] // error + +abstract class C4[sealed A4] extends Co[A4] // ok + +abstract class C5[sealed +A5] extends Co[A5] // ok + +abstract class C6[A6] extends C5[A6] // error + From 1fb06194404ae67decfd94383778f80d644ef57c Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 15:20:42 +0100 Subject: [PATCH 05/19] Require array element types to be sealed --- .../src/dotty/tools/dotc/cc/CaptureOps.scala | 6 ++++ .../src/dotty/tools/dotc/cc/CaptureSet.scala | 1 + .../dotty/tools/dotc/cc/CheckCaptures.scala | 31 ++++++++++++++----- .../dotty/tools/dotc/transform/Recheck.scala | 6 ++-- tests/neg-custom-args/captures/buffers.check | 26 ++++++++++++++++ tests/neg-custom-args/captures/buffers.scala | 30 ++++++++++++++++++ 6 files changed, 90 insertions(+), 10 deletions(-) create mode 100644 tests/neg-custom-args/captures/buffers.check create mode 100644 tests/neg-custom-args/captures/buffers.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index dccf07ba199e..0fe79da30ca5 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -206,6 +206,12 @@ extension (tp: Type) case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) case _ => false + def isSealed(using Context): Boolean = tp match + case tp: TypeParamRef => tp.underlying.isSealed + case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot) + case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag? 
+ case _ => false + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 2586d449dfd4..7261c760aa01 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -872,6 +872,7 @@ object CaptureSet: upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) if variance > 0 || isExact then upper else if variance < 0 then CaptureSet.empty + else if ctx.mode.is(Mode.Printing) then upper else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting") /** Apply `f` to each element in `xs`, and join result sets with `++` */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index b29809a69427..bd27fd30580b 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -148,7 +148,7 @@ object CheckCaptures: val check = new TypeTraverser: extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.owner.enclosingMethodOrClass + val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) else def recur(encl: Symbol): Boolean = @@ -160,11 +160,9 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}") + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") t.info match - case TypeBounds(_, hi) - if !t.symbol.is(Sealed) && !hi.hasAnnotation(defn.Caps_SealedAnnot) - && !t.symbol.isParametricIn(carrier) => + case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => if hi.isAny then 
report.error( em"""$what cannot $have $tp since @@ -543,8 +541,8 @@ class CheckCaptures extends Recheck, SymTransformer: val TypeApply(fn, args) = tree val polyType = atPhase(thisPhase.prev): fn.tpe.widen.asInstanceOf[TypeLambda] - for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do - if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then + for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do + if formal.isSealed then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" disallowRootCapabilitiesIn(arg.knownType, fn.symbol, i"Sealed type variable $pname", "be instantiated to", @@ -1313,6 +1311,23 @@ class CheckCaptures extends Recheck, SymTransformer: traverseChildren(tp) check.traverse(info) + def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit = + val check = new TypeTraverser: + def traverse(t: Type): Unit = + t match + case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then + CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, + "Array", "have element type", + "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", + pos) + traverseChildren(t) + case defn.RefinedFunctionOf(rinfo: MethodType) => + traverse(rinfo) + case _ => + traverseChildren(t) + check.traverse(tp) + /** Perform the following kinds of checks * - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`. * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. 
@@ -1338,6 +1353,8 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => case _: ValOrDefDef | _: TypeDef => checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos) + case tree: TypeTree => + checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 9833b3cf177f..b15a58b98b6f 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - atPhase(thisPhase) { - super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) - } + atPhase(thisPhase): + withMode(Mode.Printing): + super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) end Recheck /** A class that can be used to test basic rechecking without any customaization */ diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check new file mode 100644 index 000000000000..cdb7baa852fb --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.check @@ -0,0 +1,26 @@ +-- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^ + | mutable variable elems cannot have type Array[A] since + | that type refers to the type variable A, which is not sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- +16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + | ^^^^^^^^^^^ + | Sealed type variable A cannot be instantiated to box A^? 
since + | that type refers to the type variable A, which is not sealed. + | This is often caused by a local capability in an argument of constructor ArrayBuffer + | leaking as part of its result. +-- Error: tests/neg-custom-args/captures/buffers.scala:11:13 ----------------------------------------------------------- +11 | var elems: Array[A] = new Array[A](10) // error // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. +-- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ +22 | val x: Array[A] = new Array[A](10) // error + | ^^^^^^^^ + | Array cannot have element type A since + | that type refers to the type variable A, which is not sealed. + | Since arrays are mutable, they have to be treated like variables, + | so their element type must be sealed. diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala new file mode 100644 index 000000000000..760ddab96ae5 --- /dev/null +++ b/tests/neg-custom-args/captures/buffers.scala @@ -0,0 +1,30 @@ +import reflect.ClassTag + +class Buffer[A] + +class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) + def add(x: A): this.type = ??? + def at(i: Int): A = ??? + +class ArrayBufferBAD[A: ClassTag] extends Buffer[A]: + var elems: Array[A] = new Array[A](10) // error // error + def add(x: A): this.type = ??? + def at(i: Int): A = ??? 
+ +object ArrayBuffer: + def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error + elems = xs.toArray + def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer: + elems = xs.toArray // ok + +class EncapsArray[A: ClassTag]: + val x: Array[A] = new Array[A](10) // error + + + + + + + + From 8540fb84da0d4f9beca0075194fd28ab21866ed4 Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 17:05:03 +0100 Subject: [PATCH 06/19] Don't generate capture set variables for self types of pure classes The tricky thing here is how to recognize that a class is pure since that is known only during capture checking and we are at Setup, the phase before. But we can approximate by treating the `Pure` trait as definitely pure. --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 4 ++- .../dotty/tools/dotc/core/Definitions.scala | 2 +- tests/pos-custom-args/captures/steppers.scala | 27 +++++++++++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/steppers.scala diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 68fd79048f41..e90a8394f87d 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -522,7 +522,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: tree.symbol match case cls: ClassSymbol => val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo - if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then + if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic) + && !cls.isPureClass + then // add capture set to self type of nested classes if no self type is given explicitly. 
val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls)) val ps1 = inContext(ctx.withOwner(cls)): diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 205d43cd07ca..40370973ebf0 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -1443,7 +1443,7 @@ class Definitions { /** Base classes that are assumed to be pure for the purposes of capture checking. * Every class inheriting from a pure baseclass is pure. */ - @tu lazy val pureBaseClasses = Set(defn.ThrowableClass) + @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass) /** Non-inheritable lasses that are assumed to be pure for the purposes of capture checking, */ diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala new file mode 100644 index 000000000000..815ac938b492 --- /dev/null +++ b/tests/pos-custom-args/captures/steppers.scala @@ -0,0 +1,27 @@ + +trait Stepper[+A]: + this: Stepper[A]^ => + +object Stepper: + trait EfficientSplit + +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure + +trait IterableOnce[+A] extends Any: + this: IterableOnce[A]^ => + def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ??? + +sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure: + def array: Array[_] + + def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]] + ArraySeq.make(arr).asInstanceOf[ArraySeq[T]] + +object ArraySeq: + + def make[sealed T](x: Array[T]): ArraySeq[T] = ??? + + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure: + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ??? 
+ From b50fe548b63d71da5486bd0533d15f730a058faf Mon Sep 17 00:00:00 2001 From: odersky Date: Tue, 31 Oct 2023 22:54:28 +0100 Subject: [PATCH 07/19] Avoid reporting post check messages several times --- compiler/src/dotty/tools/dotc/cc/Setup.scala | 1 + .../tools/dotc/reporting/UniqueMessagePositions.scala | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index e90a8394f87d..8ba53693870c 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -707,4 +707,5 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def postCheck()(using Context): Unit = for chk <- todoAtPostCheck do chk(ctx) + todoAtPostCheck.clear() end Setup \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 98fd7da3032a..71b2636ab8ed 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter { || dia.pos.exists && !ctx.settings.YshowSuppressedErrors.value - && (dia.pos.start to dia.pos.end).exists(pos => - positions.get((ctx.source, pos)).exists(_.hides(dia))) + && (dia.pos.start to dia.pos.end).exists: offset => + positions.get((ctx.source, offset)).exists(_.hides(dia)) override def markReported(dia: Diagnostic)(using Context): Unit = if dia.pos.exists then - for (pos <- dia.pos.start to dia.pos.end) - positions.get(ctx.source, pos) match + for offset <- dia.pos.start to dia.pos.end do + positions.get((ctx.source, offset)) match case Some(dia1) if dia1.hides(dia) => - case _ => positions((ctx.source, pos)) = dia + case _ => positions((ctx.source, offset)) = dia super.markReported(dia) } From 
876e5ee8a1d872d86f751751b58872d764295974 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 10:37:22 +0100 Subject: [PATCH 08/19] Don't flag wildcard array arguments for not being sealed --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index bd27fd30580b..94aff4d314a4 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -21,7 +21,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.DefaultGetterName +import NameKinds.{DefaultGetterName, WildcardParamName} import reporting.trace /** The capture checker */ @@ -1316,7 +1316,9 @@ class CheckCaptures extends Recheck, SymTransformer: def traverse(t: Type): Unit = t match case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass => - if !(pos.span.isSynthetic && ctx.reporter.errorsReported) then + if !(pos.span.isSynthetic && ctx.reporter.errorsReported) + && !arg.typeSymbol.name.is(WildcardParamName) + then CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol, "Array", "have element type", "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.", @@ -1339,10 +1341,11 @@ class CheckCaptures extends Recheck, SymTransformer: val lctx = tree match case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol) case _ => ctx - traverseChildren(tree)(using lctx) - check(tree) + trace(i"post check $tree"): + traverseChildren(tree)(using lctx) + check(tree) def check(tree: Tree)(using Context) = tree match - case t @ TypeApply(fun, args) => + case TypeApply(fun, args) => fun.knownType.widen match case tl: PolyType => val normArgs = 
args.lazyZip(tl.paramInfos).map: (arg, bounds) => From 84f313caf35baa9e35693395a85657272f78a9af Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 11:36:27 +0100 Subject: [PATCH 09/19] Fix isPureClass test --- compiler/src/dotty/tools/dotc/cc/CaptureOps.scala | 6 +++++- .../captures/exception-definitions.check | 8 ++++---- tests/neg-custom-args/captures/leaked-curried.check | 11 ++++------- tests/neg-custom-args/captures/leaked-curried.scala | 4 ++-- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 0fe79da30ca5..40e94ebde5dd 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -231,7 +231,11 @@ extension (cls: ClassSymbol) && bc.givenSelfType.dealiasKeepAnnots.match case CapturingType(_, refs) => refs.isAlwaysEmpty case RetainingType(_, refs) => refs.isEmpty - case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty + case selfType => + isCaptureChecking // At Setup we have not processed self types yet, so + // unless a self type is explicitly given, we can't tell + // and err on the side of impure. 
+ && selfType.exists && selfType.captureSet.isAlwaysEmpty extension (sym: Symbol) diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 16d623e64f7c..4b1fe0273f52 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -6,8 +6,8 @@ -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ - |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable --- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 ----------------------------------------------- + |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2 +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ---------------------------------------------- 8 | class Err3(c: Any^) extends Exception // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable + | ^ + | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3 diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check index c23d1516acf5..3f0a9800a4ec 100644 --- a/tests/neg-custom-args/captures/leaked-curried.check +++ b/tests/neg-custom-args/captures/leaked-curried.check @@ -2,10 +2,7 @@ 14 | () => () => io // error | ^^ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz --- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 ------------------------------- -15 | class Foo extends Box, Pure: // error - | ^ - | illegal 
inheritance: self type Foo^{io} of class Foo does not conform to self type Pure - | of parent trait Pure - | - | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ---------------------------------------------------- +17 | () => () => io // error + | ^^ + |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala index a7c48219b450..f9238259e065 100644 --- a/tests/neg-custom-args/captures/leaked-curried.scala +++ b/tests/neg-custom-args/captures/leaked-curried.scala @@ -12,8 +12,8 @@ def main(): Unit = self => val get: () ->{} () ->{io} Cap^ = () => () => io // error - class Foo extends Box, Pure: // error + class Foo extends Box, Pure: val get: () ->{} () ->{io} Cap^ = - () => () => io + () => () => io // error new Foo val bad = leaked.get()().use() // using a leaked capability From f663665f607023ede1e5d8550f751f34c57dd874 Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 15:19:47 +0100 Subject: [PATCH 10/19] Make sealed an annotation # Conflicts: # tests/pos-special/stdlib/collection/ArrayOps.scala --- compiler/src/dotty/tools/dotc/typer/Namer.scala | 9 ++++++++- tests/neg/class-mods.scala | 2 +- tests/pos-custom-args/captures/sealed-value-class.scala | 3 +++ 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 tests/pos-custom-args/captures/sealed-value-class.scala diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 7ef552e3661c..5361f37c2a76 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -1042,7 +1042,14 @@ class Namer { typer: Typer => tp val rhs1 = typedAheadType(rhs) - val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds + val 
rhsBodyType: TypeBounds = + val bounds = addVariances(rhs1.tpe).toBounds + if sym.is(Sealed) then + sym.resetFlag(Sealed) + bounds.derivedTypeBounds(bounds.lo, + AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span))) + else bounds + val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType) def opaqueToBounds(info: Type): Type = diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala index 60e9fb279364..cf4348ad42d7 100644 --- a/tests/neg/class-mods.scala +++ b/tests/neg/class-mods.scala @@ -2,7 +2,7 @@ open final class Foo1 // error sealed open class Foo2 // error open type T1 // error -sealed type T2 // error +type T2 // ok abstract type T3 // error abstract open type T4 // error diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala new file mode 100644 index 000000000000..b5f25bf2d203 --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-value-class.scala @@ -0,0 +1,3 @@ +class Ops[sealed A](xs: Array[A]) extends AnyVal: + + def f(p: A => Boolean): Array[A] = xs From d778a3dea59bf0796931de1446fddc972c4c65df Mon Sep 17 00:00:00 2001 From: odersky Date: Wed, 1 Nov 2023 18:31:56 +0100 Subject: [PATCH 11/19] Avoid infinite recursions when checking F-bounded types --- .../dotty/tools/dotc/cc/CheckCaptures.scala | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 94aff4d314a4..a8ff9d3d5955 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -14,7 +14,7 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa import typer.Checking.{checkBounds, checkAppliedTypesIn} import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} -import util.{SimpleIdentitySet, 
EqHashMap, SrcPos, Property} +import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* import transform.{Recheck, PreRecheck} import Recheck.* @@ -147,6 +147,8 @@ object CheckCaptures: private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: + private val seen = new EqHashSet[TypeRef] + extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = val encl = carrier.maybeOwner.enclosingMethodOrClass if encl.isClass then tparam.isParametricIn(encl) @@ -160,19 +162,21 @@ object CheckCaptures: def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => - capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") - t.info match - case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => - if hi.isAny then - report.error( - em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. - |$addendum""", - pos) - else - traverse(hi) - case _ => - traverseChildren(t) + if !seen.contains(t) then + capt.println(i"disallow $t, $tp, $what, ${t.isSealed}") + seen += t + t.info match + case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => + if hi.isAny then + report.error( + em"""$what cannot $have $tp since + |that type refers to the type variable $t, which is not sealed. 
+ |$addendum""", + pos) + else + traverse(hi) + case _ => + traverseChildren(t) case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot => () case t => From e033cd1631803d1d874cd179076c537e3e9b2e8f Mon Sep 17 00:00:00 2001 From: odersky Date: Thu, 2 Nov 2023 21:43:46 +0100 Subject: [PATCH 12/19] Survive "cannot establish a reference" errors in TreeTypeMap --- compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 955892b2ae22..d2e18729836b 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -105,7 +105,8 @@ class TreeTypeMap( tree1.withType(mapType(tree1.tpe)) match { case id: Ident => if needsSelect(id.tpe) then - ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) + catch case ex: TypeError => super.transform(id) else super.transform(id) case sel: Select => From 9d31fb2311730560098f7be1d5b8d0115315cfca Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 11:08:46 +0100 Subject: [PATCH 13/19] Make SubstRecThis typemap idempotent --- compiler/src/dotty/tools/dotc/core/Substituters.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 5a641416b3e1..bd30177adcb4 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -189,7 +189,7 @@ object Substituters: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { def apply(tp: 
Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } From 7d950b799cce572a13a520133d4a19caa128276f Mon Sep 17 00:00:00 2001 From: odersky Date: Fri, 3 Nov 2023 17:52:22 +0100 Subject: [PATCH 14/19] Better error message for capture errors involving self types --- .../src/dotty/tools/dotc/cc/CheckCaptures.scala | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index a8ff9d3d5955..066bba19252c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -263,11 +263,12 @@ class CheckCaptures extends Recheck, SymTransformer: pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ - def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = + def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, + provenance: => String = "", cs1description: String = "")(using Context) = checkOK( cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not" - else i"references $cs1 are not all", + if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" + else i"references $cs1$cs1description are not all", pos, provenance) /** The current environment */ @@ -683,9 +684,15 @@ class CheckCaptures extends Recheck, SymTransformer: if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) + def selfType = impl.body + .collect: + case TypeDef(tpnme.SELF, rhs) => rhs + .headOption + .getOrElse(tree) + .orElse(tree) checkSubset(thisSet, CaptureSet.empty.withDescription(i"of pure base class $pureBase"), - tree.srcPos) + selfType.srcPos, cs1description = " captured by this self type") super.recheckClassDef(tree, 
impl, cls) finally curEnv = saved From 5610730b9c64f24459dccc6323cc27314c7898e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 10:54:57 +0100 Subject: [PATCH 15/19] Add sealed refs test and fix check files of other tests --- tests/neg-custom-args/captures/cc-this.check | 2 +- tests/neg-custom-args/captures/cc-this2.check | 14 +++++-- .../captures/cc-this2/D_2.scala | 2 +- .../captures/exception-definitions.check | 9 ++-- .../captures/exception-definitions.scala | 4 +- .../captures/sealed-refs.scala | 42 +++++++++++++++++++ 6 files changed, 60 insertions(+), 13 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-refs.scala diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check index 335302c5c259..070e815d6d45 100644 --- a/tests/neg-custom-args/captures/cc-this.check +++ b/tests/neg-custom-args/captures/cc-this.check @@ -12,4 +12,4 @@ -- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------ 17 | class C4(val f: () => Int) extends C3 // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3 + |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3 diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check index 5e43a45b67f5..bd9a1085d262 100644 --- a/tests/neg-custom-args/captures/cc-this2.check +++ b/tests/neg-custom-args/captures/cc-this2.check @@ -1,6 +1,12 @@ --- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -------------------------------------------------------- +-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 -------------------------------------------------------- +3 | this: D^ => // error + | ^^ + |reference (caps.cap : caps.Cap) captured by this self type is 
not included in the allowed capture set {} of pure base class class C +-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 ----------------------------------- 2 |class D extends C: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C -3 | this: D^ => + | ^ + | illegal inheritance: self type D^ of class D does not conform to self type C + | of parent class C + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala index b22e5e456092..de1a722f73a9 100644 --- a/tests/neg-custom-args/captures/cc-this2/D_2.scala +++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala @@ -1,3 +1,3 @@ class D extends C: // error - this: D^ => + this: D^ => // error diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check index 4b1fe0273f52..72b88f252e59 100644 --- a/tests/neg-custom-args/captures/exception-definitions.check +++ b/tests/neg-custom-args/captures/exception-definitions.check @@ -1,8 +1,7 @@ --- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 ----------------------------------------------- -2 |class Err extends Exception: // error - |^ - |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable -3 | self: Err^ => +-- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 ----------------------------------------------- +3 | self: Err^ => // error + | ^^^^ + |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable -- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ---------------------------------------------- 7 | val x = c // error | ^ diff --git 
a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala index a19b751825b8..fbc9f3fd1d33 100644 --- a/tests/neg-custom-args/captures/exception-definitions.scala +++ b/tests/neg-custom-args/captures/exception-definitions.scala @@ -1,6 +1,6 @@ -class Err extends Exception: // error - self: Err^ => +class Err extends Exception: + self: Err^ => // error def test(c: Any^) = class Err2 extends Exception: diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala new file mode 100644 index 000000000000..05fa483acf28 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-refs.scala @@ -0,0 +1,42 @@ +class Ref[sealed A](init: A): + this: Ref[A]^ => + private var x: A = init + def get: A = x + def set(x: A): Unit = this.x = x + +class It[X]: + this: It[X]^ => + +def f1[B1](x: B1, next: B1 -> B1) = + var r = x // ok + r = next(x) + r + +def f2[B2](x: B2, next: B2 -> B2) = + val r = Ref[B2](x) // error + r.set(next(x)) + r.get + +def g[sealed B](x: B, next: B -> B) = + val r = Ref[B](x) // ok + r.set(next(x)) + r.get + +import annotation.unchecked.uncheckedCaptures + +def h[B](x: B, next: B -> B) = + val r = Ref[B @uncheckedCaptures](x) // ok + r.set(next(x)) + r.get + +def f3[B](x: B, next: B -> B) = + val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error + r.set(next(x)) + val y = r.get + () + +def f4[B](x: B, next: B -> B) = + val r: Ref[B]^{cap[f4]} = Ref[B](x) // error + r.set(next(x)) + val y = r.get + () \ No newline at end of file From d4c084c4269a0bf43e2e402a204afb7d89307fd8 Mon Sep 17 00:00:00 2001 From: odersky Date: Sat, 4 Nov 2023 12:20:15 +0100 Subject: [PATCH 16/19] Refine isParametric tests Mutable variables can appeal to parametricity only if they are not captured. We use "not captured by any closure" as a sound approximation for that, since variables themselves are currently not tracked, so we cannot use something more fine-grained. 
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 89 +++++++++++++++---- tests/neg-custom-args/captures/buffers.check | 6 +- tests/neg-custom-args/captures/levels.check | 4 +- .../captures/sealed-leaks.check | 50 +++++++++++ .../captures/sealed-leaks.scala | 32 ++++++- 5 files changed, 160 insertions(+), 21 deletions(-) create mode 100644 tests/neg-custom-args/captures/sealed-leaks.check diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 066bba19252c..48e2d7635a80 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -16,7 +16,7 @@ import typer.ErrorReporting.{Addenda, err} import typer.ProtoTypes.{AnySelectionProto, LhsProto} import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.SymUtils.* -import transform.{Recheck, PreRecheck} +import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} @@ -149,15 +149,25 @@ object CheckCaptures: private val seen = new EqHashSet[TypeRef] + /** Check that there is at least one method containing carrier and defined + * in the scope of tparam. E.g. this is OK: + * def f[T] = { ... var x: T ... 
} + * So is this: + * class C[T] { def f() = { class D { var x: T }}} + * But this is not OK: + * class C[T] { object o { var x: T }} + */ extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean = - val encl = carrier.maybeOwner.enclosingMethodOrClass - if encl.isClass then tparam.isParametricIn(encl) - else - def recur(encl: Symbol): Boolean = - if tparam.owner == encl then true - else if encl.isStatic || !encl.exists then false - else recur(encl.owner.enclosingMethodOrClass) - recur(encl) + carrier.exists && { + val encl = carrier.owner.enclosingMethodOrClass + if encl.isClass then tparam.isParametricIn(encl) + else + def recur(encl: Symbol): Boolean = + if tparam.owner == encl then true + else if encl.isStatic || !encl.exists then false + else recur(encl.owner.enclosingMethodOrClass) + recur(encl) + } def traverse(t: Type) = t.dealiasKeepAnnots match @@ -168,9 +178,12 @@ object CheckCaptures: t.info match case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) => if hi.isAny then + val detailStr = + if t eq tp then "variable" + else i"refers to the type variable $t, which" report.error( em"""$what cannot $have $tp since - |that type refers to the type variable $t, which is not sealed. + |that type $detailStr is not sealed. 
|$addendum""", pos) else @@ -549,7 +562,7 @@ class CheckCaptures extends Recheck, SymTransformer: for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do if formal.isSealed then def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else "" - disallowRootCapabilitiesIn(arg.knownType, fn.symbol, + disallowRootCapabilitiesIn(arg.knownType, NoSymbol, i"Sealed type variable $pname", "be instantiated to", i"This is often caused by a local capability$where\nleaking as part of its result.", tree.srcPos) @@ -590,13 +603,58 @@ class CheckCaptures extends Recheck, SymTransformer: openClosures = openClosures.tail end recheckClosureBlock + /** Maps mutable variables to the symbols that capture them (in the + * CheckCaptures sense, i.e. symbol is referred to from a different method + * than the one it is defined in). + */ + private val capturedBy = util.HashMap[Symbol, Symbol]() + + /** Maps anonymous functions appearing as function arguments to + * the function that is called. + */ + private val anonFunCallee = util.HashMap[Symbol, Symbol]() + + /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`. 
+ */ + private def collectCapturedMutVars(using Context) = new TreeTraverser: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + capturedBy(sym) = enclMeth + case Apply(fn, args) => + for case closureDef(mdef) <- args do + anonFunCallee(mdef.symbol) = fn.symbol + traverseChildren(tree) + case Inlined(_, bindings, expansion) => + traverse(bindings) + traverse(expansion) + case mdef: DefDef => + if !mdef.symbol.isInlineMethod then traverseChildren(tree) + case _ => + traverseChildren(tree) + override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type = try if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - disallowRootCapabilitiesIn(tree.tpt.knownType, sym, - i"mutable $sym", "have type", "", sym.srcPos) + val (carrier, addendum) = capturedBy.get(sym) match + case Some(encl) => + val enclStr = + if encl.isAnonymousFunction then + val location = anonFunCallee.get(encl) match + case Some(meth) if meth.exists => i" argument in a call to $meth" + case _ => "" + s"an anonymous function$location" + else encl.show + (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr") + case _ => + (sym, "") + disallowRootCapabilitiesIn( + tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -1168,11 +1226,12 @@ class CheckCaptures extends Recheck, SymTransformer: private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup] override def checkUnit(unit: CompilationUnit)(using Context): Unit = - setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef) + setup.setupUnit(unit.tpdTree, completeDef) + 
collectCapturedMutVars.traverse(unit.tpdTree) if ctx.settings.YccPrintSetup.value then val echoHeader = "[[syntax tree at end of cc setup]]" - val treeString = show(ctx.compilationUnit.tpdTree) + val treeString = show(unit.tpdTree) report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check index cdb7baa852fb..07acea3c48e3 100644 --- a/tests/neg-custom-args/captures/buffers.check +++ b/tests/neg-custom-args/captures/buffers.check @@ -1,7 +1,7 @@ -- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^ - | mutable variable elems cannot have type Array[A] since + | Mutable variable elems cannot have type Array[A] since | that type refers to the type variable A, which is not sealed. -- Error: tests/neg-custom-args/captures/buffers.scala:16:38 ----------------------------------------------------------- 16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error @@ -14,13 +14,13 @@ 11 | var elems: Array[A] = new Array[A](10) // error // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. -- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------ 22 | val x: Array[A] = new Array[A](10) // error | ^^^^^^^^ | Array cannot have element type A since - | that type refers to the type variable A, which is not sealed. + | that type variable is not sealed. | Since arrays are mutable, they have to be treated like variables, | so their element type must be sealed. 
diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check index f91f90fb652f..c0cc7f0a759c 100644 --- a/tests/neg-custom-args/captures/levels.check +++ b/tests/neg-custom-args/captures/levels.check @@ -1,8 +1,8 @@ -- Error: tests/neg-custom-args/captures/levels.scala:6:16 ------------------------------------------------------------- 6 | private var v: T = init // error | ^ - | mutable variable v cannot have type T since - | that type refers to the type variable T, which is not sealed. + | Mutable variable v cannot have type T since + | that type variable is not sealed. -- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------ 17 | val _ = Ref[String => String]((x: String) => x) // error | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check new file mode 100644 index 000000000000..f7098eba32b6 --- /dev/null +++ b/tests/neg-custom-args/captures/sealed-leaks.check @@ -0,0 +1,50 @@ +-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------ +31 | () + | ^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------ +12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Sealed type variable T cannot be instantiated to (() => Unit) | Null since + | that type captures the root capability `cap`. + | This is often caused by a local capability in an argument of method usingLogFile + | leaking as part of its result. 
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 --------------------------------- +19 | usingLogFile { f => x = f } // error + | ^ + | Found: (f : java.io.FileOutputStream^) + | Required: (java.io.FileOutputStream | Null)^{cap[Test2]} + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------ +30 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 ------------------------------------------------------- +39 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. + | + | Note that variable x does not count as local since it is captured by an anonymous function +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 ------------------------------------------------------- +43 | var x: T = y // error + | ^ + |Mutable variable x cannot have type T since + |that type variable is not sealed. + | + |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 ------------------------------------------------------- +47 | var x: T = y // error + | ^ + | Mutable variable x cannot have type T since + | that type variable is not sealed. 
+ | + | Note that variable x does not count as local since it is captured by method foo +-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------ +11 | val later = usingLogFile { f => () => f.write(0) } // error + | ^^^^^^^^^^^^ + | local reference f leaks into outer capture set of type parameter T of method usingLogFile diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala index a7acf77b5678..2555ba8a3e07 100644 --- a/tests/neg-custom-args/captures/sealed-leaks.scala +++ b/tests/neg-custom-args/captures/sealed-leaks.scala @@ -18,4 +18,34 @@ def Test2 = usingLogFile { f => x = f } // error - later() \ No newline at end of file + later() + +def Test3 = + def f[T](y: T) = + var x: T = y + () + + class C[T](y: T): + object o: + var x: T = y // error + () + + class C2[T](y: T): + def f = + var x: T = y // ok + () + + def g1[T](y: T): T => Unit = + var x: T = y // error + y => x = y + + def g2[T](y: T): T => Unit = + var x: T = y // error + identity(y => x = y) + + def g3[T](y: T): Unit = + var x: T = y // error + def foo = + x = y + () + From dccadb9e8c394043cafb3cc42c259480288144b5 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 18:09:18 +0100 Subject: [PATCH 17/19] Don't recheck inherited trait parameters during capture checking The logic gets confused by the added capture refinements. 
--- .../dotty/tools/dotc/cc/CheckCaptures.scala | 2 + .../tools/dotc/transform/CapturedVars.scala | 55 +++++++------------ .../dotty/tools/dotc/typer/RefChecks.scala | 5 +- .../captures/sealed-lowerbound.scala | 12 ++++ 4 files changed, 38 insertions(+), 36 deletions(-) create mode 100644 tests/pos-custom-args/captures/sealed-lowerbound.scala diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 48e2d7635a80..a49bd9f79351 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -1190,6 +1190,8 @@ class CheckCaptures extends Recheck, SymTransformer: override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) + + override def checkInheritedTraitParameters: Boolean = false end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index a018bbd1a3ac..202e3d72fa25 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -13,25 +13,20 @@ import core.NameKinds.TempResultName import core.Constants._ import util.Store import dotty.tools.uncheckedNN - -import scala.compiletime.uninitialized +import ast.tpd.* +import compiletime.uninitialized /** This phase translates variables that are captured in closures to * heap-allocated refs. 
*/ class CapturedVars extends MiniPhase with IdentityDenotTransformer: thisPhase => - import ast.tpd._ override def phaseName: String = CapturedVars.name override def description: String = CapturedVars.description - private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = uninitialized - private def captured(using Context) = ctx.store(Captured) - - override def initContext(ctx: FreshContext): Unit = - Captured = ctx.addLocation(util.ReadOnlySet.empty) + private val captured = util.HashSet[Symbol]() private class RefInfo(using Context) { /** The classes for which a Ref type exists. */ @@ -57,33 +52,10 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: myRefInfo.uncheckedNN } - private class CollectCaptured extends TreeTraverser { - private val captured = util.HashSet[Symbol]() - def traverse(tree: Tree)(using Context) = tree match { - case id: Ident => - val sym = id.symbol - if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) { - val enclMeth = ctx.owner.enclosingMethod - if (sym.enclosingMethod != enclMeth) { - report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") - captured += sym - } - } - case _ => - traverseChildren(tree) - } - def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { - traverse(tree) - captured - } - } - - override def prepareForUnit(tree: Tree)(using Context): Context = { - val captured = atPhase(thisPhase) { - CollectCaptured().runOver(ctx.compilationUnit.tpdTree) - } - ctx.fresh.updateStore(Captured, captured) - } + override def prepareForUnit(tree: Tree)(using Context): Context = + captured.clear() + atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree) + ctx /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`, * depending on whether the reference should be @volatile @@ -143,3 +115,16 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: object CapturedVars: val name: String = "capturedVars" val 
description: String = "represent vars captured by closures as heap objects" + + def collect(captured: util.HashSet[Symbol]): TreeTraverser = new: + def traverse(tree: Tree)(using Context) = tree match + case id: Ident => + val sym = id.symbol + if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + val enclMeth = ctx.owner.enclosingMethod + if sym.enclosingMethod != enclMeth then + report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") + captured += sym + case _ => + traverseChildren(tree) +end CapturedVars diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index eef88e76971e..af279844f370 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -267,6 +267,9 @@ object RefChecks { if !other.is(Deferred) then checkOverride(subtypeChecker, dcl, other) end checkAll + + // Disabled for capture checking since traits can get different parameter refinements + def checkInheritedTraitParameters: Boolean = true end OverridingPairsChecker /** 1. Check all members of class `clazz` for overriding conditions. @@ -851,7 +854,7 @@ object RefChecks { checkCaseClassInheritanceInvariant() } - if (!clazz.is(Trait)) { + if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) { // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. 
say we have // diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala new file mode 100644 index 000000000000..e848f784cddc --- /dev/null +++ b/tests/pos-custom-args/captures/sealed-lowerbound.scala @@ -0,0 +1,12 @@ +def foo[sealed B](x: B): B = x + +def bar[B, sealed A >: B](x: A): A = foo[A](x) + +class C[sealed A] + +class CV[sealed A](x: Int): + def this() = this: + val x = new C[A]: + println("foo") + 0 + From 3caf116571771df1adc251523dfe5d0b511fb7e2 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 17:44:16 +0100 Subject: [PATCH 18/19] Original version of stdlib collections without capture checking --- .../dotty/tools/dotc/CompilationTests.scala | 2 +- tests/pos-special/stdlib/Test1.scala | 34 - tests/pos-special/stdlib/Test2.scala | 232 -- .../stdlib/collection/ArrayOps.scala | 1663 +++++++++++ .../stdlib/collection/BitSet.scala | 348 +++ .../stdlib/collection/BufferedIterator.scala | 32 + .../stdlib/collection/BuildFrom.scala | 122 + .../stdlib/collection/DefaultMap.scala | 21 + .../stdlib/collection/Factory.scala | 784 ++++++ .../stdlib/collection/Hashing.scala | 62 + .../stdlib/collection/IndexedSeq.scala | 3 +- .../stdlib/collection/IndexedSeqView.scala | 180 ++ .../stdlib/collection/Iterable.scala | 153 +- .../stdlib/collection/IterableOnce.scala | 122 +- .../stdlib/collection/Iterator.scala | 138 +- .../stdlib/collection/JavaConverters.scala | 335 +++ .../stdlib/collection/LazyZipOps.scala | 422 +++ .../stdlib/collection/LinearSeq.scala | 9 +- tests/pos-special/stdlib/collection/Map.scala | 19 +- .../stdlib/collection/MapView.scala | 187 ++ .../stdlib/collection/Searching.scala | 57 + tests/pos-special/stdlib/collection/Seq.scala | 38 +- .../stdlib/collection/SeqMap.scala | 40 + .../stdlib/collection/SeqView.scala | 209 ++ tests/pos-special/stdlib/collection/Set.scala | 269 ++ .../stdlib/collection/SortedMap.scala | 220 ++ .../stdlib/collection/SortedOps.scala | 90 + 
.../stdlib/collection/SortedSet.scala | 189 ++ .../stdlib/collection/Stepper.scala | 368 +++ .../stdlib/collection/StepperShape.scala | 114 + .../StrictOptimizedIterableOps.scala | 24 +- .../collection/StrictOptimizedMapOps.scala | 48 + .../collection/StrictOptimizedSeqOps.scala | 9 +- .../collection/StrictOptimizedSetOps.scala | 29 + .../StrictOptimizedSortedMapOps.scala | 46 + .../StrictOptimizedSortedSetOps.scala | 42 + .../stdlib/collection/StringOps.scala | 35 +- .../stdlib/collection/StringParsers.scala | 319 +++ .../pos-special/stdlib/collection/View.scala | 138 +- .../stdlib/collection/WithFilter.scala | 70 + .../collection/concurrent/BasicNode.java | 19 + .../collection/concurrent/CNodeBase.java | 37 + .../stdlib/collection/concurrent/Gen.java | 15 + .../collection/concurrent/INodeBase.java | 39 + .../collection/concurrent/MainNode.java | 46 + .../stdlib/collection/concurrent/Map.scala | 192 ++ .../collection/concurrent/TrieMap.scala | 1202 ++++++++ .../collection/convert/AsJavaConverters.scala | 260 ++ .../collection/convert/AsJavaExtensions.scala | 108 + .../convert/AsScalaConverters.scala | 207 ++ .../convert/AsScalaExtensions.scala | 93 + .../convert/ImplicitConversions.scala | 181 ++ .../convert/JavaCollectionWrappers.scala | 614 ++++ .../collection/convert/StreamExtensions.scala | 480 ++++ .../convert/impl/ArrayStepper.scala | 79 + .../convert/impl/BinaryTreeStepper.scala | 248 ++ .../convert/impl/BitSetStepper.scala | 118 + .../convert/impl/ChampStepper.scala | 245 ++ .../convert/impl/InOrderStepperBase.scala | 53 + .../convert/impl/IndexedSeqStepper.scala | 44 + .../convert/impl/IndexedStepperBase.scala | 40 + .../convert/impl/IteratorStepper.scala | 129 + .../convert/impl/NumericRangeStepper.scala | 38 + .../convert/impl/RangeStepper.scala | 40 + .../convert/impl/StringStepper.scala | 58 + .../convert/impl/TableStepper.scala | 138 + .../convert/impl/VectorStepper.scala | 131 + .../collection/generic/BitOperations.scala | 50 + 
.../generic/DefaultSerializationProxy.scala | 87 + .../collection/generic/IsIterable.scala | 164 ++ .../collection/generic/IsIterableOnce.scala | 71 + .../stdlib/collection/generic/IsMap.scala | 114 + .../stdlib/collection/generic/IsSeq.scala | 114 + .../collection/generic/Subtractable.scala | 62 + .../stdlib/collection/generic/package.scala | 34 + .../collection/immutable/ArraySeq.scala | 685 +++++ .../stdlib/collection/immutable/BitSet.scala | 375 +++ .../collection/immutable/ChampCommon.scala | 252 ++ .../stdlib/collection/immutable/HashMap.scala | 2423 ++++++++++++++++ .../stdlib/collection/immutable/HashSet.scala | 2123 ++++++++++++++ .../stdlib/collection/immutable/IntMap.scala | 502 ++++ .../collection/immutable/Iterable.scala | 2 - .../collection/immutable/LazyList.scala | 1381 +++++++++ .../stdlib/collection/immutable/List.scala | 25 +- .../stdlib/collection/immutable/ListMap.scala | 371 +++ .../stdlib/collection/immutable/ListSet.scala | 138 + .../stdlib/collection/immutable/LongMap.scala | 490 ++++ .../stdlib/collection/immutable/Map.scala | 692 +++++ .../collection/immutable/NumericRange.scala | 507 ++++ .../stdlib/collection/immutable/Queue.scala | 217 ++ .../stdlib/collection/immutable/Range.scala | 672 +++++ .../collection/immutable/RedBlackTree.scala | 1231 ++++++++ .../stdlib/collection/immutable/Seq.scala | 14 +- .../stdlib/collection/immutable/SeqMap.scala | 276 ++ .../stdlib/collection/immutable/Set.scala | 398 +++ .../collection/immutable/SortedMap.scala | 177 ++ .../collection/immutable/SortedSet.scala | 57 + .../stdlib/collection/immutable/Stream.scala | 568 ++++ .../immutable/StrictOptimizedSeqOps.scala | 80 + .../stdlib/collection/immutable/TreeMap.scala | 370 +++ .../collection/immutable/TreeSeqMap.scala | 649 +++++ .../stdlib/collection/immutable/TreeSet.scala | 296 ++ .../stdlib/collection/immutable/Vector.scala | 2474 +++++++++++++++++ .../collection/immutable/VectorMap.scala | 275 ++ .../collection/immutable/WrappedString.scala | 140 + 
.../stdlib/collection/immutable/package.scala | 29 + .../stdlib/collection/mutable/AnyRefMap.scala | 601 ++++ .../collection/mutable/ArrayBuffer.scala | 403 +++ .../collection/mutable/ArrayBuilder.scala | 522 ++++ .../collection/mutable/ArrayDeque.scala | 645 +++++ .../stdlib/collection/mutable/ArraySeq.scala | 347 +++ .../stdlib/collection/mutable/BitSet.scala | 392 +++ .../stdlib/collection/mutable/Buffer.scala | 19 +- .../stdlib/collection/mutable/Builder.scala | 14 +- .../mutable/CheckedIndexedSeqView.scala | 117 + .../stdlib/collection/mutable/Cloneable.scala | 22 + .../mutable/CollisionProofHashMap.scala | 888 ++++++ .../stdlib/collection/mutable/Growable.scala | 8 +- .../collection/mutable/GrowableBuilder.scala | 37 + .../stdlib/collection/mutable/HashMap.scala | 654 +++++ .../stdlib/collection/mutable/HashSet.scala | 456 +++ .../stdlib/collection/mutable/HashTable.scala | 417 +++ .../collection/mutable/ImmutableBuilder.scala | 31 + .../collection/mutable/IndexedSeq.scala | 83 + .../stdlib/collection/mutable/Iterable.scala | 5 +- .../collection/mutable/LinkedHashMap.scala | 509 ++++ .../collection/mutable/LinkedHashSet.scala | 348 +++ .../collection/mutable/ListBuffer.scala | 20 +- .../stdlib/collection/mutable/ListMap.scala | 82 + .../stdlib/collection/mutable/LongMap.scala | 673 +++++ .../stdlib/collection/mutable/Map.scala | 268 ++ .../stdlib/collection/mutable/MultiMap.scala | 115 + .../collection/mutable/MutationTracker.scala | 3 +- .../collection/mutable/OpenHashMap.scala | 306 ++ .../collection/mutable/PriorityQueue.scala | 402 +++ .../stdlib/collection/mutable/Queue.scala | 138 + .../collection/mutable/RedBlackTree.scala | 652 +++++ .../collection/mutable/ReusableBuilder.scala | 55 + .../stdlib/collection/mutable/Seq.scala | 1 - .../stdlib/collection/mutable/SeqMap.scala | 38 + .../stdlib/collection/mutable/Set.scala | 122 + .../collection/mutable/Shrinkable.scala | 5 +- .../stdlib/collection/mutable/SortedMap.scala | 103 + 
.../stdlib/collection/mutable/SortedSet.scala | 48 + .../stdlib/collection/mutable/Stack.scala | 142 + .../collection/mutable/StringBuilder.scala | 7 +- .../stdlib/collection/mutable/TreeMap.scala | 257 ++ .../stdlib/collection/mutable/TreeSet.scala | 218 ++ .../collection/mutable/UnrolledBuffer.scala | 442 +++ .../collection/mutable/WeakHashMap.scala | 55 + .../stdlib/collection/mutable/package.scala | 41 + .../stdlib/collection/package.scala | 80 + 152 files changed, 40745 insertions(+), 707 deletions(-) delete mode 100644 tests/pos-special/stdlib/Test1.scala delete mode 100644 tests/pos-special/stdlib/Test2.scala create mode 100644 tests/pos-special/stdlib/collection/ArrayOps.scala create mode 100644 tests/pos-special/stdlib/collection/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/BufferedIterator.scala create mode 100644 tests/pos-special/stdlib/collection/BuildFrom.scala create mode 100644 tests/pos-special/stdlib/collection/DefaultMap.scala create mode 100644 tests/pos-special/stdlib/collection/Factory.scala create mode 100644 tests/pos-special/stdlib/collection/Hashing.scala create mode 100644 tests/pos-special/stdlib/collection/IndexedSeqView.scala create mode 100644 tests/pos-special/stdlib/collection/JavaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/LazyZipOps.scala create mode 100644 tests/pos-special/stdlib/collection/MapView.scala create mode 100644 tests/pos-special/stdlib/collection/Searching.scala create mode 100644 tests/pos-special/stdlib/collection/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/SeqView.scala create mode 100644 tests/pos-special/stdlib/collection/Set.scala create mode 100644 tests/pos-special/stdlib/collection/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/SortedOps.scala create mode 100644 tests/pos-special/stdlib/collection/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/Stepper.scala create mode 100644 
tests/pos-special/stdlib/collection/StepperShape.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala create mode 100644 tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala create mode 100644 tests/pos-special/stdlib/collection/StringParsers.scala create mode 100644 tests/pos-special/stdlib/collection/WithFilter.scala create mode 100644 tests/pos-special/stdlib/collection/concurrent/BasicNode.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/CNodeBase.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/Gen.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/INodeBase.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/MainNode.java create mode 100644 tests/pos-special/stdlib/collection/concurrent/Map.scala create mode 100644 tests/pos-special/stdlib/collection/concurrent/TrieMap.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala create mode 100644 tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala create mode 100644 tests/pos-special/stdlib/collection/convert/StreamExtensions.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala create mode 100644 
tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala create mode 100644 tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala create mode 100644 tests/pos-special/stdlib/collection/generic/BitOperations.scala create mode 100644 tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsMap.scala create mode 100644 tests/pos-special/stdlib/collection/generic/IsSeq.scala create mode 100644 tests/pos-special/stdlib/collection/generic/Subtractable.scala create mode 100644 tests/pos-special/stdlib/collection/generic/package.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ArraySeq.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ChampCommon.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/HashSet.scala create mode 100644 
tests/pos-special/stdlib/collection/immutable/IntMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/LazyList.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ListMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/ListSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/LongMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Map.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/NumericRange.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Queue.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Range.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Set.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Stream.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/TreeSet.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/Vector.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/VectorMap.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/WrappedString.scala create mode 100644 tests/pos-special/stdlib/collection/immutable/package.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala create mode 
100644 tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ArraySeq.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/BitSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Cloneable.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/HashTable.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ListMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/LongMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Map.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/MultiMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Queue.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala create mode 100644 
tests/pos-special/stdlib/collection/mutable/SeqMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Set.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/SortedSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/Stack.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/TreeMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/TreeSet.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala create mode 100644 tests/pos-special/stdlib/collection/mutable/package.scala create mode 100644 tests/pos-special/stdlib/collection/package.scala diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 798e998ef241..fa89c82fc7e7 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -44,7 +44,7 @@ class CompilationTests { // Run tests for legacy lazy vals compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), - compileDir("tests/pos-special/stdlib", defaultOptions), + compileDir("tests/pos-special/stdlib", allowDeepSubtypes), ) if scala.util.Properties.isJavaAtLeast("16") then diff --git a/tests/pos-special/stdlib/Test1.scala b/tests/pos-special/stdlib/Test1.scala deleted file mode 100644 index 9ee4e7cfa6a1..000000000000 --- a/tests/pos-special/stdlib/Test1.scala +++ /dev/null @@ -1,34 +0,0 @@ -import language.experimental.captureChecking -import collection.{View, Seq} -import collection.mutable.{ArrayBuffer, ListBuffer} - -import 
java.io.* - -object Test0: - - def usingLogFile[sealed T](op: FileOutputStream^ => T): T = - val logFile = FileOutputStream("log") - val result = op(logFile) - logFile.close() - result - - def test(xs: List[Int]) = - usingLogFile: f => - xs.map: x => - f.write(x) - x * x - -object Test1: - def test(it: Iterator[Int]^, v: View[Int]^) = - val isEven: Int ->{cap[test]} Boolean = _ % 2 == 0 - val it2 = it.filter(isEven) - val _: Iterator[Int]^{it, isEven} = it2 - val it2c: Iterator[Int]^{it2} = it2 - val v2 = v.filter(isEven) - val _: View[Int]^{v, isEven} = v2 - val v2c: View[Int]^{v2} = v2 - val v3 = v.drop(2) - val _: View[Int]^{v} = v3 - val v3c: View[Int]^{v3} = v3 - val (xs6, xs7) = v.partition(isEven) - val (xs6a, xs7a) = v.partition(_ % 2 == 0) diff --git a/tests/pos-special/stdlib/Test2.scala b/tests/pos-special/stdlib/Test2.scala deleted file mode 100644 index a59da522b183..000000000000 --- a/tests/pos-special/stdlib/Test2.scala +++ /dev/null @@ -1,232 +0,0 @@ -import scala.reflect.ClassTag -import language.experimental.captureChecking -import collection.{View, Seq} -import collection.mutable.{ArrayBuffer, ListBuffer} - -object Test { - - def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int => Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ > 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - val x3 = xs.indexWhere(isEven) - val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: Seq[Int] = xs6 - val ys7: Seq[Int] = xs7 - val xs8 = xs.drop(2) - val ys8: Seq[Int] = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: Seq[Boolean] = xs9 - val xs10 = xs.flatMap(flips) - val ys10: Seq[Int] = xs10 - val xs11 = xs ++ xs - val 
ys11: Seq[Int] = xs11 - val xs12 = xs ++ Nil - val ys12: Seq[Int] = xs12 - val xs13 = Nil ++ xs - val ys13: Seq[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: Seq[Any] = xs14 - val xs15 = xs.zip(xs9) - val ys15: Seq[(Int, Boolean)] = xs15 - val xs16 = xs.reverse - val ys16: Seq[Int] = xs16 - println("-------") - println(x1) - println(x2) - println(x3) - println(x4) - println(x5) - println(xs6) - println(xs7) - println(xs8) - println(xs9) - println(xs10) - println(xs11) - println(xs12) - println(xs13) - println(xs14) - println(xs15) - println(xs16) - } - - def iterOps(xs: => Iterator[Int]^) = - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int ->{cap[iterOps]} Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ > 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - val x4 = xs.next() - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: Iterator[Int]^{xs6, isEven} = xs6 - val ys7: Iterator[Int]^{xs7, isEven} = xs7 - val (xs6a, xs7a) = xs.partition(_ % 2 == 0) - val ys6a: Iterator[Int]^{xs6} = xs6 - val ys7a: Iterator[Int]^{xs7} = xs7 - val xs8 = xs.drop(2) - val ys8: Iterator[Int]^{xs8} = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: Iterator[Boolean]^{xs9} = xs9 - val xs10 = xs.flatMap(flips) - val ys10: Iterator[Int]^{xs10} = xs10 - val xs11 = xs ++ xs - val ys11: Iterator[Int]^{xs11} = xs11 - val xs12 = xs ++ Nil - val ys12: Iterator[Int]^{xs12} = xs12 - val xs13 = Nil ++ xs - val ys13: List[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: Iterator[Any]^{xs14} = xs14 - val xs15 = xs.zip(xs9) - val ys15: Iterator[(Int, Boolean)]^{xs15} = xs15 - println("-------") - println(x1) - println(x2) - println(x4) - println(x5) - println(xs6.to(List)) - println(xs7.to(List)) - println(xs8.to(List)) - 
println(xs9.to(List)) - println(xs10.to(List)) - println(xs11.to(List)) - println(xs12.to(List)) - println(xs13.to(List)) - println(xs14.to(List)) - println(xs15.to(List)) - - def viewOps(xs: View[Int]^) = { - val strPlusInt: (String, Int) => String = _ + _ - val intPlusStr: (Int, String) => String = _ + _ - val isEven: Int ->{cap[viewOps]} Boolean = _ % 2 == 0 - val isNonNeg: Int => Boolean = _ > 0 - val flips: Int => List[Int] = x => x :: -x :: Nil - val x1 = xs.foldLeft("")(strPlusInt) - val y1: String = x1 - val x2 = xs.foldRight("")(intPlusStr) - val y2: String = x2 - //val x3 = xs.indexWhere(_ % 2 == 0) // indexWhere does not exist on View - //val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Int] = x5 - val (xs6, xs7) = xs.partition(isEven) - val ys6: View[Int]^{xs6, isEven} = xs6 - val ys7: View[Int]^{xs7, isEven} = xs7 - val (xs6a, xs7a) = xs.partition(_ % 2 == 0) - val ys6a: View[Int]^{xs6} = xs6 - val ys7a: View[Int]^{xs7} = xs7 - val xs8 = xs.drop(2) - val ys8: View[Int]^{xs8} = xs8 - val xs9 = xs.map(isNonNeg) - val ys9: View[Boolean]^{xs9} = xs9 - val xs10 = xs.flatMap(flips) - val ys10: View[Int]^{xs10} = xs10 - val xs11 = xs ++ xs - val ys11: View[Int]^{xs11} = xs11 - val xs12 = xs ++ Nil - val ys12: View[Int]^{xs12} = xs12 - val xs13 = Nil ++ xs - val ys13: List[Int] = xs13 - val xs14 = xs ++ ("a" :: Nil) - val ys14: View[Any]^{xs14} = xs14 - val xs15 = xs.zip(xs9) - val ys15: View[(Int, Boolean)]^{xs15} = xs15 - println("-------") - println(x1) - println(x2) - println(x4) - println(x5) - println(xs6.to(List)) - println(xs7.to(List)) - println(xs8.to(List)) - println(xs9.to(List)) - println(xs10.to(List)) - println(xs11.to(List)) - println(xs12.to(List)) - println(xs13.to(List)) - println(xs14.to(List)) - println(xs15.to(List)) - } - - def stringOps(xs: String) = { - val x1 = xs.foldLeft("")(_ + _) - val y1: String = x1 - val x2 = xs.foldRight("")(_ + _) - val y2: String = x2 - val x3 = xs.indexWhere(_ % 2 
== 0) - val y3: Int = x3 - val x4 = xs.head - val y4: Int = x4 - val x5 = xs.to(List) - val y5: List[Char] = x5 - val (xs6, xs7) = xs.partition(_ % 2 == 0) - val ys6: String = xs6 - val ys7: String = xs7 - val xs8 = xs.drop(2) - val ys8: String = xs8 - val xs9 = xs.map(_ + 1) - val ys9: Seq[Int] = xs9 - val xs9a = xs.map(_.toUpper) - val ys9a: String = xs9a - val xs10 = xs.flatMap((x: Char) => s"$x,$x") - val ys10: String = xs10 - val xs11 = xs ++ xs - val ys11: String = xs11 - val ops = collection.StringOps(xs) // !!! otherwise we can a "cannot establish reference" - val xs13 = Nil ++ ops.iterator - val ys13: List[Char] = xs13 - val xs14 = xs ++ ("xyz" :: Nil) - val ys14: Seq[Any] = xs14 - val xs15 = xs.zip(xs9) - val ys15: Seq[(Char, Int)] = xs15 - println("-------") - println(x1) - println(x2) - println(x3) - println(x4) - println(x5) - println(xs6) - println(xs7) - println(xs8) - println(xs9) - println(xs9a) - println(xs10) - println(xs11) - println(xs13) - println(xs14) - println(xs15) - } - - def main(args: Array[String]) = { - val ints = List(1, 2, 3) - val intsBuf = ints.to(ArrayBuffer) - val intsListBuf = ints.to(ListBuffer) - val intsView = ints.view - seqOps(ints) - seqOps(intsBuf) - seqOps(intsListBuf) - viewOps(intsView) - iterOps(ints.iterator) - stringOps("abc") - } -} diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala new file mode 100644 index 000000000000..485427886625 --- /dev/null +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -0,0 +1,1663 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. 
Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. + * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. 
+ * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. 
+ * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. + * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. 
+ */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
   */
  def dropWhile(p: A => Boolean): Array[A] = {
    val i = indexWhere(x => !p(x))
    val lo = if(i < 0) xs.length else i
    slice(lo, xs.length)
  }

  /** An iterator over the elements of this array.
   *  Dispatches on the runtime array type so that each primitive array gets a
   *  specialized (non-boxing) `ArrayIterator`; the final cast erases back to `A`. */
  def iterator: Iterator[A] =
    ((xs: Any @unchecked) match {
      case xs: Array[AnyRef]  => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Int]     => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Double]  => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Long]    => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Float]   => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Char]    => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Byte]    => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Short]   => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs)
      case xs: Array[Unit]    => new ArrayOps.ArrayIterator(xs)
      case null => throw new NullPointerException
    }).asInstanceOf[Iterator[A]]

  /** A stepper over this array, selected by the implicit `StepperShape`.
   *  Primitive shapes use direct array steppers; Byte/Short/Char/Float are widened
   *  to the nearest Stepper element type (Int/Double). For the reference shape a
   *  boxed-boolean array needs a boxing stepper; everything else is an object array. */
  def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
    import convert.impl._
    val s = (shape.shape: @unchecked) match {
      case StepperShape.ReferenceShape => (xs: Any) match {
        case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length)
        case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef]], 0, xs.length)
      }
      case StepperShape.IntShape    => new IntArrayStepper          (xs.asInstanceOf[Array[Int]],    0, xs.length)
      case StepperShape.LongShape   => new LongArrayStepper         (xs.asInstanceOf[Array[Long]],   0, xs.length)
      case StepperShape.DoubleShape => new DoubleArrayStepper       (xs.asInstanceOf[Array[Double]], 0, xs.length)
      case StepperShape.ByteShape   => new WidenedByteArrayStepper  (xs.asInstanceOf[Array[Byte]],   0, xs.length)
      case StepperShape.ShortShape  => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short]],  0, xs.length)
      case StepperShape.CharShape   => new WidenedCharArrayStepper  (xs.asInstanceOf[Array[Char]],   0, xs.length)
      case StepperShape.FloatShape  => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float]],  0, xs.length)
    }
    s.asInstanceOf[S with EfficientSplit]
  }

  /** Partitions elements in fixed size arrays.
   *  @see [[scala.collection.Iterator]], method `grouped`
   *  @param size the number of elements per group
   *  @return An iterator producing arrays of size `size`, except the
   *          last will be less than size `size` if the elements don't divide evenly.
   */
  def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size)

  /** Splits this array into a prefix/suffix pair according to a predicate.
   *
   *  Note: `c span p` is equivalent to (but more efficient than)
   *  `(c takeWhile p, c dropWhile p)`, provided the evaluation of the
   *  predicate `p` does not cause any side-effects.
   *
   *  @param p the test predicate
   *  @return a pair consisting of the longest prefix of this array whose
   *          elements all satisfy `p`, and the rest of this array.
   */
  def span(p: A => Boolean): (Array[A], Array[A]) = {
    val i = indexWhere(x => !p(x))
    val idx = if(i < 0) xs.length else i
    (slice(0, idx), slice(idx, xs.length))
  }

  /** Splits this array into two at a given position.
   *  Note: `c splitAt n` is equivalent to `(c take n, c drop n)`.
   *
   *  @param n the position at which to split.
   *  @return a pair of arrays consisting of the first `n`
   *          elements of this array, and the other elements.
   */
  def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n))

  /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */
  def partition(p: A => Boolean): (Array[A], Array[A]) = {
    val res1, res2 = ArrayBuilder.make[A]
    var i = 0
    while(i < xs.length) {
      val x = xs(i)
      (if(p(x)) res1 else res2) += x
      i += 1
    }
    (res1.result(), res2.result())
  }

  /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one
   *  made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second
   *  one made of those wrapped in [[scala.util.Right]].
   *
   *  Example:
   *  {{{
   *    val xs = Array(1, "one", 2, "two", 3, "three") partitionMap {
   *      case i: Int    => Left(i)
   *      case s: String => Right(s)
   *    }
   *    // xs == (Array(1, 2, 3),
   *    //        Array(one, two, three))
   *  }}}
   *
   *  @tparam A1 the element type of the first resulting collection
   *  @tparam A2 the element type of the second resulting collection
   *  @param f   the 'split function' mapping the elements of this array to an [[scala.util.Either]]
   *  @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]],
   *          and the second one made of those wrapped in [[scala.util.Right]]. */
  def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = {
    val res1 = ArrayBuilder.make[A1]
    val res2 = ArrayBuilder.make[A2]
    var i = 0
    while(i < xs.length) {
      f(xs(i)) match {
        case Left(x)  => res1 += x
        case Right(x) => res2 += x
      }
      i += 1
    }
    (res1.result(), res2.result())
  }

  /** Returns a new array with the elements in reversed order. */
  @inline def reverse: Array[A] = {
    val len = xs.length
    val res = new Array[A](len)
    var i = 0
    while(i < len) {
      res(len-i-1) = xs(i)
      i += 1
    }
    res
  }

  /** An iterator yielding elements in reversed order.
   *
   *  Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently.
   *
   *  @return an iterator yielding the elements of this array in reversed order
   */
  def reverseIterator: Iterator[A] =
    // Same runtime-type dispatch as `iterator`, to keep primitive arrays unboxed.
    ((xs: Any @unchecked) match {
      case xs: Array[AnyRef]  => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Int]     => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Double]  => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Long]    => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Float]   => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Char]    => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Byte]    => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Short]   => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs)
      case xs: Array[Unit]    => new ArrayOps.ReverseIterator(xs)
      case null => throw new NullPointerException
    }).asInstanceOf[Iterator[A]]

  /** Selects all elements of this array which satisfy a predicate.
   *
   *  @param p the predicate used to test elements.
   *  @return a new array consisting of all elements of this array that satisfy the given predicate `p`.
   */
  def filter(p: A => Boolean): Array[A] = {
    val res = ArrayBuilder.make[A]
    var i = 0
    while(i < xs.length) {
      val x = xs(i)
      if(p(x)) res += x
      i += 1
    }
    res.result()
  }

  /** Selects all elements of this array which do not satisfy a predicate.
   *
   *  @param p the predicate used to test elements.
   *  @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`.
   */
  def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x))

  /** Sorts this array according to an Ordering.
   *
   *  The sort is stable. That is, elements that are equal (as determined by
   *  `lt`) appear in the same order in the sorted sequence as in the original.
   *
   *  @see [[scala.math.Ordering]]
   *  @param ord the ordering to be used to compare elements.
   *  @return an array consisting of the elements of this array
   *          sorted according to the ordering `ord`.
   */
  def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = {
    val len = xs.length
    // Fallback for primitive arrays with a non-standard ordering: either sort a
    // clone in place (small arrays, stable) or box to AnyRef, sort, and copy back.
    def boxed = if(len < ArrayOps.MaxStableSortLength) {
      val a = xs.clone()
      Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]])
      a
    } else {
      val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef)
      Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]])
      Array.copyAs[A](a, len)
    }
    if(len <= 1) xs.clone()
    else ((xs: Array[_]) match {
      case xs: Array[AnyRef] =>
        val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a
      case xs: Array[Int] =>
        // `java.util.Arrays.sort` on primitives is only valid for the natural ordering.
        if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
        else boxed
      case xs: Array[Long] =>
        if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
        else boxed
      case xs: Array[Char] =>
        if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
        else boxed
      case xs: Array[Byte] =>
        if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
        else boxed
      case xs: Array[Short] =>
        if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
        else boxed
      case xs: Array[Boolean] =>
        // No Arrays.sort overload for Boolean, so use the stable sort directly.
        if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a }
        else boxed
      case xs => boxed
    }).asInstanceOf[Array[A]]
  }

  /** Sorts this array according to a comparison function.
   *
   *  The sort is stable. That is, elements that are equal (as determined by
   *  `lt`) appear in the same order in the sorted sequence as in the original.
   *
   *  @param lt the comparison function which tests whether
   *            its first argument precedes its second argument in
   *            the desired ordering.
   *  @return an array consisting of the elements of this array
   *          sorted according to the comparison function `lt`.
   */
  def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt))

  /** Sorts this array according to the Ordering which results from transforming
   *  an implicitly given Ordering with a transformation function.
   *
   *  @see [[scala.math.Ordering]]
   *  @param f   the transformation function mapping elements
   *             to some other domain `B`.
   *  @param ord the ordering assumed on domain `B`.
   *  @tparam B  the target type of the transformation `f`, and the type where
   *             the ordering `ord` is defined.
   *  @return an array consisting of the elements of this array
   *          sorted according to the ordering where `x < y` if
   *          `ord.lt(f(x), f(y))`.
   */
  def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f)

  /** Creates a non-strict filter of this array.
   *
   *  Note: the difference between `c filter p` and `c withFilter p` is that
   *  the former creates a new array, whereas the latter only
   *  restricts the domain of subsequent `map`, `flatMap`, `foreach`,
   *  and `withFilter` operations.
   *
   *  @param p the predicate used to test elements.
   *  @return an object of class `ArrayOps.WithFilter`, which supports
   *          `map`, `flatMap`, `foreach`, and `withFilter` operations.
   *          All these operations apply to those elements of this array
   *          which satisfy the predicate `p`.
   */
  def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs)

  /** Finds index of first occurrence of some value in this array after or at some start index.
   *
   *  @param elem the element value to search for.
   *  @param from the start index
   *  @return the index `>= from` of the first element of this array that is equal (as determined by `==`)
   *          to `elem`, or `-1`, if none exists.
   */
  def indexOf(elem: A, from: Int = 0): Int = {
    var i = from
    while(i < xs.length) {
      if(elem == xs(i)) return i
      i += 1
    }
    -1
  }

  /** Finds index of the first element satisfying some predicate after or at some start index.
   *
   *  @param p    the predicate used to test elements.
   *  @param from the start index
   *  @return the index `>= from` of the first element of this array that satisfies the predicate `p`,
   *          or `-1`, if none exists.
   */
  def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = {
    var i = from
    while(i < xs.length) {
      if(p(xs(i))) return i
      i += 1
    }
    -1
  }

  /** Finds index of last occurrence of some value in this array before or at a given end index.
   *
   *  @param elem the element value to search for.
   *  @param end  the end index.
   *  @return the index `<= end` of the last element of this array that is equal (as determined by `==`)
   *          to `elem`, or `-1`, if none exists.
   */
  def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = {
    var i = min(end, xs.length-1)
    while(i >= 0) {
      if(elem == xs(i)) return i
      i -= 1
    }
    -1
  }

  /** Finds index of last element satisfying some predicate before or at given end index.
   *
   *  @param p the predicate used to test elements.
   *  @return the index `<= end` of the last element of this array that satisfies the predicate `p`,
   *          or `-1`, if none exists.
   */
  def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = {
    var i = min(end, xs.length-1)
    while(i >= 0) {
      if(p(xs(i))) return i
      i -= 1
    }
    -1
  }

  /** Finds the first element of the array satisfying a predicate, if any.
   *
   *  @param p the predicate used to test elements.
   *  @return an option value containing the first element in the array
   *          that satisfies `p`, or `None` if none exists.
   */
  def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = {
    val idx = indexWhere(p)
    if(idx == -1) None else Some(xs(idx))
  }

  /** Tests whether a predicate holds for at least one element of this array.
   *
   *  @param p the predicate used to test elements.
   *  @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false`
   */
  def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0

  /** Tests whether a predicate holds for all elements of this array.
   *
   *  @param p the predicate used to test elements.
   *  @return `true` if this array is empty or the given predicate `p`
   *          holds for all elements of this array, otherwise `false`.
   */
  def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = {
    var i = 0
    while(i < xs.length) {
      if(!p(xs(i))) return false
      i += 1
    }
    true
  }

  /** Applies a binary operator to a start value and all elements of this array,
   *  going left to right.
   *
   *  @param z  the start value.
   *  @param op the binary operator.
   *  @tparam B the result type of the binary operator.
   *  @return the result of inserting `op` between consecutive elements of this array,
   *          going left to right with the start value `z` on the left:
   *          {{{
   *            op(...op(z, x_1), x_2, ..., x_n)
   *          }}}
   *          where `x,,1,,, ..., x,,n,,` are the elements of this array.
   *          Returns `z` if this array is empty.
   */
  def foldLeft[B](z: B)(op: (B, A) => B): B = {
    // Specialized inner loop so each primitive array type gets its own compiled
    // loop body instead of a boxing megamorphic one.
    def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = {
      val length = xs.length
      var v: Any = z
      var i = 0
      while(i < length) {
        v = op(v, xs(i))
        i += 1
      }
      v
    }
    ((xs: Any @unchecked) match {
      case null => throw new NullPointerException // null-check first helps static analysis of instanceOf
      case xs: Array[AnyRef]  => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Int]     => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Double]  => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Long]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Float]   => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Char]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Byte]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Short]   => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Unit]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
    }).asInstanceOf[B]
  }

  /** Produces an array containing cumulative results of applying the binary
   *  operator going left to right.
   *
   *  @param z  the start value.
   *  @param op the binary operator.
   *  @tparam B the result type of the binary operator.
   *  @return array with intermediate values.
   *
   *  Example:
   *  {{{
   *    Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10)
   *  }}}
   */
  def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = {
    var v = z
    var i = 0
    val res = new Array[B](xs.length + 1)
    while(i < xs.length) {
      res(i) = v
      v = op(v, xs(i))
      i += 1
    }
    res(i) = v
    res
  }

  /** Computes a prefix scan of the elements of the array.
   *
   *  Note: The neutral element `z` may be applied more than once.
   *
   *  @tparam B element type of the resulting array
   *  @param z  neutral element for the operator `op`
   *  @param op the associative operator for the scan
   *  @return a new array containing the prefix scan of the elements in this array
   */
  def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op)

  /** Produces an array containing cumulative results of applying the binary
   *  operator going right to left.
   *
   *  @param z  the start value.
   *  @param op the binary operator.
   *  @tparam B the result type of the binary operator.
   *  @return array with intermediate values.
   *
   *  Example:
   *  {{{
   *    Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0)
   *  }}}
   */
  def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = {
    var v = z
    var i = xs.length - 1
    val res = new Array[B](xs.length + 1)
    res(xs.length) = z
    while(i >= 0) {
      v = op(xs(i), v)
      res(i) = v
      i -= 1
    }
    res
  }

  /** Applies a binary operator to all elements of this array and a start value,
   *  going right to left.
   *
   *  @param z  the start value.
   *  @param op the binary operator.
   *  @tparam B the result type of the binary operator.
   *  @return the result of inserting `op` between consecutive elements of this array,
   *          going right to left with the start value `z` on the right:
   *          {{{
   *            op(x_1, op(x_2, ... op(x_n, z)...))
   *          }}}
   *          where `x,,1,,, ..., x,,n,,` are the elements of this array.
   *          Returns `z` if this array is empty.
   */
  def foldRight[B](z: B)(op: (A, B) => B): B = {
    // Same specialization trick as foldLeft, iterating from the end.
    def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = {
      var v = z
      var i = xs.length - 1
      while(i >= 0) {
        v = op(xs(i), v)
        i -= 1
      }
      v
    }
    ((xs: Any @unchecked) match {
      case null => throw new NullPointerException
      case xs: Array[AnyRef]  => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Int]     => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Double]  => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Long]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Float]   => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Char]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Byte]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Short]   => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
      case xs: Array[Unit]    => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
    }).asInstanceOf[B]

  }

  /** Folds the elements of this array using the specified associative binary operator.
   *
   *  @tparam A1 a type parameter for the binary operator, a supertype of `A`.
   *  @param z   a neutral element for the fold operation; may be added to the result
   *             an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation,
   *             0 for addition, or 1 for multiplication).
   *  @param op  a binary operator that must be associative.
   *  @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty.
   */
  def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)

  /** Builds a new array by applying a function to all elements of this array.
   *
   *  @param f  the function to apply to each element.
   *  @tparam B the element type of the returned array.
   *  @return a new array resulting from applying the given function
   *          `f` to each element of this array and collecting the results.
   */
  def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = {
    val len = xs.length
    val ys = new Array[B](len)
    if(len > 0) {
      var i = 0
      // Dispatch on runtime array type so each primitive read avoids a generic
      // (boxing) array access path.
      (xs: Any @unchecked) match {
        case xs: Array[AnyRef]  => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Int]     => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Double]  => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Long]    => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Float]   => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Char]    => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Byte]    => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Short]   => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
        case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
      }
    }
    ys
  }

  /** Transforms this array in place by applying `f` to each element; returns the same array. */
  def mapInPlace(f: A => A): Array[A] = {
    var i = 0
    while (i < xs.length) {
      xs.update(i, f(xs(i)))
      i = i + 1
    }
    xs
  }

  /** Builds a new array by applying a function to all elements of this array
   *  and using the elements of the resulting collections.
   *
   *  @param f  the function to apply to each element.
   *  @tparam B the element type of the returned array.
   *  @return a new array resulting from applying the given collection-valued function
   *          `f` to each element of this array and concatenating the results.
   */
  def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = {
    val b = ArrayBuilder.make[B]
    var i = 0
    while(i < xs.length) {
      b ++= f(xs(i))
      i += 1
    }
    b.result()
  }

  def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] =
    flatMap[B](x => asIterable(f(x)))

  /** Flattens a two-dimensional array by concatenating all its rows
   *  into a single array.
   *
   *  @tparam B         Type of row elements.
   *  @param asIterable A function that converts elements of this array to rows - Iterables of type `B`.
   *  @return An array obtained by concatenating rows of this array.
   */
  def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = {
    val b = ArrayBuilder.make[B]
    val len = xs.length
    // First pass: compute a size hint where the row sizes are cheaply known.
    var size = 0
    var i = 0
    while(i < len) {
      xs(i) match {
        case it: IterableOnce[_] =>
          val k = it.knownSize
          if(k > 0) size += k
        case a: Array[_] => size += a.length
        case _ =>
      }
      i += 1
    }
    if(size > 0) b.sizeHint(size)
    i = 0
    while(i < len) {
      b ++= asIterable(xs(i))
      i += 1
    }
    b.result()
  }

  /** Builds a new array by applying a partial function to all elements of this array
   *  on which the function is defined.
   *
   *  @param pf the partial function which filters and maps the array.
   *  @tparam B the element type of the returned array.
   *  @return a new array resulting from applying the given partial function
   *          `pf` to each element on which it is defined and collecting the results.
   *          The order of the elements is preserved.
   */
  def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = {
    // `fallback` is a sentinel: applyOrElse returns it (by reference) when pf is undefined.
    val fallback: Any => Any = ArrayOps.fallback
    val b = ArrayBuilder.make[B]
    var i = 0
    while (i < xs.length) {
      val v = pf.applyOrElse(xs(i), fallback)
      if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B])
      i += 1
    }
    b.result()
  }

  /** Finds the first element of the array for which the given partial function is defined, and applies the
   *  partial function to it. */
  def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = {
    val fallback: Any => Any = ArrayOps.fallback
    var i = 0
    while (i < xs.length) {
      val v = pf.applyOrElse(xs(i), fallback)
      if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B])
      i += 1
    }
    None
  }

  /** Returns an array formed from this array and another iterable collection
   *  by combining corresponding elements in pairs.
   *  If one of the two collections is longer than the other, its remaining elements are ignored.
   *
   *  @param that The iterable providing the second half of each result pair
   *  @tparam B   the type of the second half of the returned pairs
   *  @return a new array containing pairs consisting of corresponding elements of this array and `that`.
   *          The length of the returned array is the minimum of the lengths of this array and `that`.
   */
  def zip[B](that: IterableOnce[B]): Array[(A, B)] = {
    val b = new ArrayBuilder.ofRef[(A, B)]()
    val k = that.knownSize
    b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length)
    var i = 0
    val it = that.iterator
    while(i < xs.length && it.hasNext) {
      b += ((xs(i), it.next()))
      i += 1
    }
    b.result()
  }

  /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is
   *  invoked on the returned `LazyZip2` decorator.
   *
   *  Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of
   *  constructing and deconstructing intermediary tuples.
   *
   *  {{{
   *    val xs = List(1, 2, 3)
   *    val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d)
   *    // res == List(4, 8, 12)
   *  }}}
   *
   *  @param that the iterable providing the second element of each eventual pair
   *  @tparam B   the type of the second element in each eventual pair
   *  @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs
   *          or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported.
   */
  def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that)

  /** Returns an array formed from this array and another iterable collection
   *  by combining corresponding elements in pairs.
   *  If one of the two collections is shorter than the other,
   *  placeholder elements are used to extend the shorter collection to the length of the longer.
   *
   *  @param that     the iterable providing the second half of each result pair
   *  @param thisElem the element to be used to fill up the result if this array is shorter than `that`.
   *  @param thatElem the element to be used to fill up the result if `that` is shorter than this array.
   *  @return a new array containing pairs consisting of corresponding elements of this array and `that`.
   *          The length of the returned array is the maximum of the lengths of this array and `that`.
   *          If this array is shorter than `that`, `thisElem` values are used to pad the result.
   *          If `that` is shorter than this array, `thatElem` values are used to pad the result.
   */
  def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = {
    val b = new ArrayBuilder.ofRef[(A1, B)]()
    val k = that.knownSize
    b.sizeHint(max(k, xs.length))
    var i = 0
    val it = that.iterator
    while(i < xs.length && it.hasNext) {
      b += ((xs(i), it.next()))
      i += 1
    }
    while(it.hasNext) {
      b += ((thisElem, it.next()))
      i += 1
    }
    while(i < xs.length) {
      b += ((xs(i), thatElem))
      i += 1
    }
    b.result()
  }

  /** Zips this array with its indices.
   *
   *  @return A new array containing pairs consisting of all elements of this array paired with their index.
   *          Indices start at `0`.
   */
  def zipWithIndex: Array[(A, Int)] = {
    val b = new Array[(A, Int)](xs.length)
    var i = 0
    while(i < xs.length) {
      b(i) = ((xs(i), i))
      i += 1
    }
    b
  }

  /** A copy of this array with an element appended. */
  def appended[B >: A : ClassTag](x: B): Array[B] = {
    val dest = Array.copyAs[B](xs, xs.length+1)
    dest(xs.length) = x
    dest
  }

  @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x)

  /** A copy of this array with an element prepended. */
  def prepended[B >: A : ClassTag](x: B): Array[B] = {
    val dest = new Array[B](xs.length + 1)
    dest(0) = x
    Array.copy(xs, 0, dest, 1, xs.length)
    dest
  }

  @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x)

  /** A copy of this array with all elements of a collection prepended. */
  def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = {
    val b = ArrayBuilder.make[B]
    val k = prefix.knownSize
    if(k >= 0) b.sizeHint(k + xs.length)
    b.addAll(prefix)
    // If the prefix size was unknown, hint once it is (b.length is now exact).
    if(k < 0) b.sizeHint(b.length + xs.length)
    b.addAll(xs)
    b.result()
  }

  /** A copy of this array with all elements of an array prepended.
   */
  def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = {
    val dest = Array.copyAs[B](prefix, prefix.length+xs.length)
    Array.copy(xs, 0, dest, prefix.length, xs.length)
    dest
  }

  @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix)

  @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix)

  /** A copy of this array with all elements of a collection appended. */
  def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = {
    val b = ArrayBuilder.make[B]
    val k = suffix.knownSize
    if(k >= 0) b.sizeHint(k + xs.length)
    b.addAll(xs)
    b.addAll(suffix)
    b.result()
  }

  /** A copy of this array with all elements of an array appended. */
  def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = {
    val dest = Array.copyAs[B](xs, xs.length+suffix.length)
    Array.copy(suffix, 0, dest, xs.length, suffix.length)
    dest
  }

  @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix)

  @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix)

  @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix)

  @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix)

  @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs)

  @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs)

  /** Tests whether this array contains a given value as an element.
   *
   *  @param elem the element to test.
   *  @return `true` if this array has an element that is equal (as
   *          determined by `==`) to `elem`, `false` otherwise.
   */
  def contains(elem: A): Boolean = exists (_ == elem)

  /** Returns a copy of this array with patched values.
   *  Patching at negative indices is the same as patching starting at 0.
   *  Patching at indices at or larger than the length of the original array appends the patch to the end.
   *  If more values are replaced than actually exist, the excess is ignored.
   *
   *  @param from     The start index from which to patch
   *  @param other    The patch values
   *  @param replaced The number of values in the original array that are replaced by the patch.
   */
  def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = {
    val b = ArrayBuilder.make[B]
    val k = other.knownSize
    val r = if(replaced < 0) 0 else replaced
    if(k >= 0) b.sizeHint(xs.length + k - r)
    val chunk1 = if(from > 0) min(from, xs.length) else 0
    if(chunk1 > 0) b.addAll(xs, 0, chunk1)
    b ++= other
    val remaining = xs.length - chunk1 - r
    if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining)
    b.result()
  }

  /** Converts an array of pairs into an array of first elements and an array of second elements.
   *
   *  @tparam A1    the type of the first half of the element pairs
   *  @tparam A2    the type of the second half of the element pairs
   *  @param asPair an implicit conversion which asserts that the element type
   *                of this Array is a pair.
   *  @param ct1    a class tag for `A1` type parameter that is required to create an instance
   *                of `Array[A1]`
   *  @param ct2    a class tag for `A2` type parameter that is required to create an instance
   *                of `Array[A2]`
   *  @return a pair of Arrays, containing, respectively, the first and second half
   *          of each element pair of this Array.
   */
  def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = {
    val a1 = new Array[A1](xs.length)
    val a2 = new Array[A2](xs.length)
    var i = 0
    while (i < xs.length) {
      val e = asPair(xs(i))
      a1(i) = e._1
      a2(i) = e._2
      i += 1
    }
    (a1, a2)
  }

  /** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
   *
   *  @tparam A1      the type of the first of three elements in the triple
   *  @tparam A2      the type of the second of three elements in the triple
   *  @tparam A3      the type of the third of three elements in the triple
   *  @param asTriple an implicit conversion which asserts that the element type
   *                  of this Array is a triple.
   *  @param ct1      a class tag for T1 type parameter that is required to create an instance
   *                  of Array[T1]
   *  @param ct2      a class tag for T2 type parameter that is required to create an instance
   *                  of Array[T2]
   *  @param ct3      a class tag for T3 type parameter that is required to create an instance
   *                  of Array[T3]
   *  @return a triple of Arrays, containing, respectively, the first, second, and third
   *          elements from each element triple of this Array.
   */
  def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2],
                         ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = {
    val a1 = new Array[A1](xs.length)
    val a2 = new Array[A2](xs.length)
    val a3 = new Array[A3](xs.length)
    var i = 0
    while (i < xs.length) {
      val e = asTriple(xs(i))
      a1(i) = e._1
      a2(i) = e._2
      a3(i) = e._3
      i += 1
    }
    (a1, a2, a3)
  }

  /** Transposes a two dimensional array.
   *
   *  @tparam B      Type of row elements.
   *  @param asArray A function that converts elements of this array to rows - arrays of type `B`.
   *  @return An array obtained by replacing elements of this arrays with rows the represent.
   */
  def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = {
    val aClass = xs.getClass.getComponentType
    val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass))
    if (xs.length == 0) bb.result()
    else {
      def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType))
      // One builder per output row, i.e. per column of the input.
      val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder())
      for (xs <- this) {
        var i = 0
        for (x <- new ArrayOps(asArray(xs))) {
          bs(i) += x
          i += 1
        }
      }
      for (b <- new ArrayOps(bs)) bb += b.result()
      bb.result()
    }
  }

  /** Apply `f` to each element for its side effects.
   *  Note: [U] parameter needed to help scalac's type inference.
   */
  def foreach[U](f: A => U): Unit = {
    val len = xs.length
    var i = 0
    (xs: Any @unchecked) match {
      case xs: Array[AnyRef]  => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Int]     => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Double]  => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Long]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Float]   => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Char]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Byte]    => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Short]   => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
      case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
    }
  }

  /** Selects all the elements of this array ignoring the duplicates.
   *
   *  @return a new array consisting of all the elements of this array without duplicates.
   */
  def distinct: Array[A] = distinctBy(identity)

  /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying
   *  the transforming function `f`.
   *
   *  @param f The transforming function whose result is used to determine the uniqueness of each element
   *  @tparam B the type of the elements after being transformed by `f`
   *  @return a new array consisting of all the elements of this array without duplicates.
   */
  def distinctBy[B](f: A => B): Array[A] =
    ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result()

  /** A copy of this array with an element value appended until a given target length is reached.
   *
   *  @param len  the target length
   *  @param elem the padding value
   *  @tparam B   the element type of the returned array.
   *  @return a new array consisting of
   *          all elements of this array followed by the minimal number of occurrences of `elem` so
   *          that the resulting collection has a length of at least `len`.
   */
  def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = {
    var i = xs.length
    val newlen = max(i, len)
    val dest = Array.copyAs[B](xs, newlen)
    while(i < newlen) {
      dest(i) = elem
      i += 1
    }
    dest
  }

  /** Produces the range of all indices of this sequence.
   *
   *  @return a `Range` value from `0` to one less than the length of this array.
   */
  def indices: Range = Range(0, xs.length)

  /** Partitions this array into a map of arrays according to some discriminator function.
   *
   *  @param f the discriminator function.
   *  @tparam K the type of keys returned by the discriminator function.
   *  @return A map from keys to arrays such that the following invariant holds:
   *          {{{
   *            (xs groupBy f)(k) = xs filter (x => f(x) == k)
   *          }}}
   *          That is, every key `k` is bound to an array of those elements `x`
   *          for which `f(x)` equals `k`.
   */
  def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = {
    val m = mutable.Map.empty[K, ArrayBuilder[A]]
    val len = xs.length
    var i = 0
    while(i < len) {
      val elem = xs(i)
      val key = f(elem)
      val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A])
      bldr += elem
      i += 1
    }
    m.view.mapValues(_.result()).toMap
  }

  /**
   *  Partitions this array into a map of arrays according to a discriminator function `key`.
   *  Each element in a group is transformed into a value of type `B` using the `value` function.
   *
   *  It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient.
   *
   *  {{{
   *    case class User(name: String, age: Int)
   *
   *    def namesByAge(users: Array[User]): Map[Int, Array[String]] =
   *      users.groupMap(_.age)(_.name)
   *  }}}
   *
   *  @param key the discriminator function
   *  @param f   the element transformation function
   *  @tparam K  the type of keys returned by the discriminator function
   *  @tparam B  the type of values returned by the transformation function
   */
  def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = {
    val m = mutable.Map.empty[K, ArrayBuilder[B]]
    val len = xs.length
    var i = 0
    while(i < len) {
      val elem = xs(i)
      val k = key(elem)
      val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B])
      bldr += f(elem)
      i += 1
    }
    m.view.mapValues(_.result()).toMap
  }

  @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq

  /** An immutable indexed sequence backed by a defensive copy of this array. */
  def toIndexedSeq: immutable.IndexedSeq[A] =
    immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length))

  /** Copy elements of this array to another array.
   *  Fills the given array `xs` starting at index 0.
   *  Copying will stop once either all the elements of this array have been copied,
   *  or the end of the array is reached.
   *
   *  @param xs the array to fill.
   *  @tparam B the type of the elements of the array.
   */
  def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0)

  /** Copy elements of this array to another array.
+ * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. */ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + copyToArray(destination, 0) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. 
+ * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. 
We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. 
+ */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. 
+ * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. 
+ */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala new file mode 100644 index 000000000000..e8ca89806455 --- /dev/null +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.Builder + + +/** Base type of bitsets. + * + * This trait provides most of the operations of a `BitSet` independently of its representation. + * It is inherited by all concrete implementations of bitsets. + * + * @define bitsetinfo + * Bitsets are sets of non-negative integers which are represented as + * variable-size arrays of bits packed into 64-bit words. The lower bound of memory footprint of a bitset is + * determined by the largest number stored in it. 
+ * @define coll bitset + * @define Coll `BitSet` + */ +trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "BitSet" + override def unsorted: Set[Int] = this +} + +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`." + + def empty: BitSet = immutable.BitSet.empty + def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder + def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + + @SerialVersionUID(3L) + private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { + + @transient protected var elems: Array[Long] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val nwords = coll.nwords + out.writeInt(nwords) + var i = 0 + while(i < nwords) { + out.writeLong(coll.word(i)) + i += 1 + } + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val nwords = in.readInt() + elems = new Array[Long](nwords) + var i = 0 + while(i < nwords) { + elems(i) = in.readLong() + i += 1 + } + } + + protected[this] def readResolve(): Any + } +} + +/** Base implementation type of bitsets */ +trait BitSetOps[+C <: BitSet with BitSetOps[C]] + extends 
SortedSetOps[Int, SortedSet, C] { self => + import BitSetOps._ + + def bitSetFactory: SpecificIterableFactory[Int, C] + + def unsorted: Set[Int] + + final def ordering: Ordering[Int] = Ordering.Int + + /** The number of words (each with 64 bits) making up the set */ + protected[collection] def nwords: Int + + /** The words at index `idx`, or 0L if outside the range of the set + * '''Note:''' requires `idx >= 0` + */ + protected[collection] def word(idx: Int): Long + + /** Creates a new set of this kind from an array of longs + */ + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C + + def contains(elem: Int): Boolean = + 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L + + def iterator: Iterator[Int] = iteratorFrom(0) + + def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] { + private[this] var currentPos = if (start > 0) start >> LogWL else 0 + private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0) + final override def hasNext: Boolean = { + while (currentWord == 0) { + if (currentPos + 1 >= nwords) return false + currentPos += 1 + currentWord = word(currentPos) + } + true + } + final override def next(): Int = { + if (hasNext) { + val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord) + currentWord &= currentWord - 1 + (currentPos << LogWL) + bitPos + } else Iterator.empty.next() + } + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = scala.collection.convert.impl.BitSetStepper.from(this) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def size: Int = { + var s = 0 + var i = nwords + while (i > 0) { + i -= 1 + s += java.lang.Long.bitCount(word(i)) + } + s + } + + override def isEmpty: 
Boolean = 0 until nwords forall (i => word(i) == 0) + + @inline private[this] def smallestInt: Int = { + val thisnwords = nwords + var i = 0 + while(i < thisnwords) { + val currentWord = word(i) + if (currentWord != 0L) { + return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength) + } + i += 1 + } + throw new UnsupportedOperationException("empty.smallestInt") + } + + @inline private[this] def largestInt: Int = { + var i = nwords - 1 + while(i >= 0) { + val currentWord = word(i) + if (currentWord != 0L) { + return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1 + } + i -= 1 + } + throw new UnsupportedOperationException("empty.largestInt") + } + + override def max[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) largestInt + else if (Ordering.Int isReverseOf ord) smallestInt + else super.max(ord) + + + override def min[B >: Int](implicit ord: Ordering[B]): Int = + if (Ordering.Int eq ord) smallestInt + else if (Ordering.Int isReverseOf ord) largestInt + else super.min(ord) + + override def foreach[U](f: Int => U): Unit = { + /* NOTE: while loops are significantly faster as of 2.11 and + one major use case of bitsets is performance. Also, there + is nothing to do when all bits are clear, so use that as + the inner loop condition. 
*/ + var i = 0 + while (i < nwords) { + var w = word(i) + var j = i * WordLength + while (w != 0L) { + if ((w&1L) == 1L) f(j) + w = w >>> 1 + j += 1 + } + i += 1 + } + } + + /** Creates a bit mask for this set as a new array of longs + */ + def toBitMask: Array[Long] = { + val a = new Array[Long](nwords) + var i = a.length + while(i > 0) { + i -= 1 + a(i) = word(i) + } + a + } + + def rangeImpl(from: Option[Int], until: Option[Int]): C = { + val a = coll.toBitMask + val len = a.length + if (from.isDefined) { + val f = from.get + val w = f >> LogWL + val b = f & (WordLength - 1) + if (w >= 0) { + java.util.Arrays.fill(a, 0, math.min(w, len), 0) + if (b > 0 && w < len) a(w) &= ~((1L << b) - 1) + } + } + if (until.isDefined) { + val u = until.get + val w = u >> LogWL + val b = u & (WordLength - 1) + if (w < len) { + java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0) + if (w >= 0) a(w) &= (1L << b) - 1 + } + } + coll.fromBitMaskNoCopy(a) + } + + override def concat(other: collection.IterableOnce[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords max otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) | otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.concat(other) + } + + override def intersect(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords min otherBitset.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.intersect(other) + } + + abstract override def diff(other: Set[Int]): C = other match { + case otherBitset: BitSet => + val len = coll.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = this.word(idx) & ~otherBitset.word(idx) + fromBitMaskNoCopy(words) + case _ => super.diff(other) + } + + /** Computes the symmetric difference of this bitset and another bitset by 
performing + * a bitwise "exclusive-or". + * + * @param other the other bitset to take part in the symmetric difference. + * @return a bitset containing those bits of this + * bitset or the other bitset that are not contained in both bitsets. + */ + def xor(other: BitSet): C = { + val len = coll.nwords max other.nwords + val words = new Array[Long](len) + for (idx <- 0 until len) + words(idx) = coll.word(idx) ^ other.word(idx) + coll.fromBitMaskNoCopy(words) + } + + @`inline` final def ^ (other: BitSet): C = xor(other) + + /** + * Builds a new bitset by applying a function to all elements of this bitset + * @param f the function to apply to each element. + * @return a new bitset resulting from applying the given function ''f'' to + * each element of this bitset and collecting the results + */ + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) + + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) + + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) + + override def partition(p: Int => Boolean): (C, C) = { + val left = filter(p) + (left, this &~ left) + } +} + +object BitSetOps { + + /* Final vals can sometimes be inlined as constants (faster) */ + private[collection] final val LogWL = 6 + private[collection] final val WordLength = 64 + private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1 + + private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = { + var len = elems.length + while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1 + var newlen = len + if (idx >= newlen && w != 0L) newlen = idx + 1 + val newelems = new Array[Long](newlen) + Array.copy(elems, 0, newelems, 0, len) + if (idx < newlen) newelems(idx) = w + else assert(w == 0L) + newelems + } + + private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long = + if (oldWord == 0L) 
0L else { + var w = oldWord + val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w) + var jmask = 1L << trailingZeroes + var j = wordIndex * BitSetOps.WordLength + trailingZeroes + val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w) + while (j != maxJ) { + if ((w & jmask) != 0L) { + if (pred(j) == isFlipped) { + // j did not pass the filter here + w = w & ~jmask + } + } + jmask = jmask << 1 + j += 1 + } + w + } +} diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala new file mode 100644 index 000000000000..bc35ee0a25da --- /dev/null +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -0,0 +1,32 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +/** Buffered iterators are iterators which provide a method `head` + * that inspects the next element without discarding it. + */ +trait BufferedIterator[+A] extends Iterator[A] { + + /** Returns next element of iterator without advancing beyond it. + */ + def head: A + + /** Returns an option of the next element of an iterator without advancing beyond it. 
+ * @return the next element of this iterator if it has a next element + * `None` if it does not + */ + def headOption : Option[A] = if (hasNext) Some(head) else None + + override def buffered: this.type = this +} diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala new file mode 100644 index 000000000000..bc9c49d9493c --- /dev/null +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound +import scala.collection.mutable.Builder +import scala.collection.immutable.WrappedString +import scala.reflect.ClassTag + +/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. + * Implicit instances of `BuildFrom` are available for all collection types. + * + * @tparam From Type of source collection + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") +trait BuildFrom[-From, -A, +C] extends Any { self => + def fromSpecific(from: From)(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder(from: From): Builder[A, C] + + @deprecated("Use newBuilder() instead of apply()", "2.13.0") + @`inline` def apply(from: From): Builder[A, C] = newBuilder(from) + + /** Partially apply a BuildFrom to a Factory */ + def toFactory(from: From): Factory[A, C] = new Factory[A, C] { + def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def newBuilder: Builder[A, C] = self.newBuilder(from) + } +} + +object BuildFrom extends BuildFromLowPriority1 { + + /** Build the source collection type from a MapOps */ + implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + } + + /** Build the source collection type from a SortedMapOps */ + implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { + def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + } + + implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = + new BuildFrom[C, Int, C] { + def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder + } + + implicit val buildFromString: BuildFrom[String, Char, String] = + 
new BuildFrom[String, Char, String] { + def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder + } + + implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = + new BuildFrom[WrappedString, Char, WrappedString] { + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder + } + + implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + new BuildFrom[Array[_], A, Array[A]] { + def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it) + def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder + } + + implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + new BuildFrom[View[A], B, View[B]] { + def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder + } + +} + +trait BuildFromLowPriority1 extends BuildFromLowPriority2 { + + /** Build the source collection type from an Iterable with SortedOps */ + // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the + // implicit search space for faster compilation and reduced change of divergence. 
See the compilation + // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 + implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { + def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] + def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) + } + + implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = + new BuildFrom[String, A, immutable.IndexedSeq[A]] { + def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) + def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] + } +} + +trait BuildFromLowPriority2 { + /** Build the source collection type from an IterableOps */ + implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { + //TODO: Reuse a prototype instance + def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] + def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it) + } + + implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { + def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder + def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala new file mode 100644 index 000000000000..cbc61d8c0268 --- 
/dev/null +++ b/tests/pos-special/stdlib/collection/DefaultMap.scala @@ -0,0 +1,21 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + + +/** A default map which builds a default `immutable.Map` implementation for all + * transformations. + */ +@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0") +trait DefaultMap[K, +V] extends Map[K, V] diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala new file mode 100644 index 000000000000..2b15f1cc15d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) 
+ */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. + * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = from(elems) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + /** Produces a $coll containing a sequence of increasing of integers. 
+ * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. 
+ * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) + * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) + private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] + } + + @SerialVersionUID(3L) + class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { + override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) + def empty[A : Ev]: CC[A] = delegate.empty + def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** Base trait for companion objects of collections that require an implicit `Ordering`. + * @tparam CC Collection type constructor (e.g. 
`SortedSet`) + */ +trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] + +object SortedIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) + extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] +} + +/** Base trait for companion objects of collections that require an implicit `ClassTag`. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { + + @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = + ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. 
+ * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala new file mode 100644 index 000000000000..4e1fd872b8b5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and 
Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + + +protected[collection] object Hashing { + + def elemHashCode(key: Any): Int = key.## + + def improve(hcode: Int): Int = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) + } + + def computeHash(key: Any): Int = + improve(elemHashCode(key)) + + /** + * Utility method to keep a subset of all bits in a given bitmap + * + * Example + * bitmap (binary): 00000001000000010000000100000001 + * keep (binary): 1010 + * result (binary): 00000001000000000000000100000000 + * + * @param bitmap the bitmap + * @param keep a bitmask containing which bits to keep + * @return the original bitmap with all bits where keep is not 1 set to 0 + */ + def keepBits(bitmap: Int, keep: Int): Int = { + var result = 0 + var current = bitmap + var kept = keep + while (kept != 0) { + // lowest remaining bit in current + val lsb = current ^ (current & (current - 1)) + if ((kept & 1) != 0) { + // mark bit in result bitmap + result |= lsb + } + // clear lowest remaining one bit in abm + current &= ~lsb + // look at the next kept bit + kept >>>= 1 + } + result + } + +} diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index 6e8e2bd0dc66..a82d5384779a 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -17,7 +17,6 @@ import scala.annotation.{nowarn, tailrec} import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering -import language.experimental.captureChecking /** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] 
extends Seq[A] @@ -104,7 +103,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def knownSize: Int = length - override final def lengthCompare(that: Iterable[_]^): Int = { + override final def lengthCompare(that: Iterable[_]): Int = { val res = that.sizeCompare(length) // can't just invert the result, because `-Int.MinValue == Int.MinValue` if (res == Int.MinValue) 1 else -res diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala new file mode 100644 index 000000000000..737f032d2060 --- /dev/null +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -0,0 +1,180 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn + + +/** View defined in terms of indexing a range */ +trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self => + + override def view: IndexedSeqView[A] = this + + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) + + override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A] = new 
IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B] = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(prefix, this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "IndexedSeqView" +} + +object IndexedSeqView { + + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = self.length + override def knownSize: Int = remainder + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + + def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value + + val 
formatFrom = formatRange(from) + val formatUntil = formatRange(until) + remainder = Math.max(0, formatUntil - formatFrom) + current = current + formatFrom + this + } + } + @SerialVersionUID(3L) + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var remainder = self.length + private[this] var pos = remainder - 1 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext + def next(): A = + if (_hasNext) { + val r = self(pos) + pos -= 1 + remainder -= 1 + r + } else Iterator.empty.next() + + // from < 0 means don't move pos, until < 0 means don't limit remainder + // + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + if (_hasNext) { + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until + } + else { + pos -= from // skip ahead + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } + else remainder -= from // ...otherwise just less the skip + } + } + this + } + } + + /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _] + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A]) + extends SeqView.Id(underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A) + extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]) + extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: 
SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A]) + extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Take(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B) + extends SeqView.Map(underlying, f) with IndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} + +/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A] diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 85c0debc6685..04647f215963 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -17,7 +17,6 @@ import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} -import language.experimental.captureChecking /** Base trait for generic collections. * @@ -29,7 +28,6 @@ import language.experimental.captureChecking trait Iterable[+A] extends IterableOnce[A] with IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => // The collection itself @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") @@ -134,31 +132,29 @@ trait Iterable[+A] extends IterableOnce[A] * and may be nondeterministic. */ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { - this: IterableOps[A, CC, C]^ => - /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ // Should be `protected def asIterable`, or maybe removed altogether if it's not needed @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") - def toIterable: Iterable[A]^{this} + def toIterable: Iterable[A] /** Converts this $coll to an unspecified Iterable. Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. 
*/ @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") - final def toTraversable: Traversable[A]^{this} = toIterable + final def toTraversable: Traversable[A] = toIterable override def isTraversableAgain: Boolean = true /** * @return This collection as a `C`. */ - protected def coll: C^{this} + protected def coll: C @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") - final def repr: C^{this} = coll + final def repr: C = coll /** * Defines how to turn a given `Iterable[A]` into a collection of type `C`. @@ -178,7 +174,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * `Iterable[A]` obtained from `this` collection (as it is the case in the * implementations of operations where we use a `View[A]`), it is safe. */ - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): C^{coll} + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C /** The companion object of this ${coll}, providing various factory methods. * @@ -255,7 +251,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable def lastOption: Option[A] = if (isEmpty) None else Some(last) /** A view over the elements of this collection. */ - def view: View[A]^{this} = View.fromIteratorProvider(() => iterator) + def view: View[A] = View.fromIteratorProvider(() => iterator) /** Compares the size of this $coll to a test value. 
* @@ -305,7 +301,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * this.sizeIs > size // this.sizeCompare(size) > 0 * }}} */ - @inline final def sizeIs: IterableOps.SizeCompareOps^{this} = new IterableOps.SizeCompareOps(this) + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) /** Compares the size of this $coll to the size of another `Iterable`. * @@ -321,7 +317,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def sizeCompare(that: Iterable[_]^): Int = { + def sizeCompare(that: Iterable[_]): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this sizeCompare thatKnownSize @@ -346,7 +342,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** A view over a slice of the elements of this collection. */ @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - def view(from: Int, until: Int): View[A]^{this} = view.slice(from, until) + def view(from: Int, until: Int): View[A] = view.slice(from, until) /** Transposes this $coll of iterable collections into * a $coll of ${coll}s. @@ -382,7 +378,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @throws IllegalArgumentException if all collections in this $coll * are not of the same size. 
*/ - def transpose[B](implicit asIterable: A -> /*<: /*<: Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) - def filterNot(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) /** Creates a non-strict filter of this $coll. * @@ -421,7 +417,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * All these operations apply to those elements of this $coll * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): collection.WithFilter[A, CC]^{this, p} = new IterableOps.WithFilter(this, p) + def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) /** A pair of, first, all elements that satisfy predicate `p` and, second, * all elements that do not. Interesting because it splits a collection in two. @@ -430,15 +426,15 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, * which requires only a single traversal. */ - def partition(p: A => Boolean): (C^{this, p}, C^{this, p}) = { + def partition(p: A => Boolean): (C, C) = { val first = new View.Filter(this, p, false) val second = new View.Filter(this, p, true) (fromSpecific(first), fromSpecific(second)) } - override def splitAt(n: Int): (C^{this}, C^{this}) = (take(n), drop(n)) + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) - def take(n: Int): C^{this} = fromSpecific(new View.Take(this, n)) + def take(n: Int): C = fromSpecific(new View.Take(this, n)) /** Selects the last ''n'' elements. 
* $orderDependent @@ -447,7 +443,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def takeRight(n: Int): C^{this} = fromSpecific(new View.TakeRight(this, n)) + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -455,11 +451,11 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.TakeWhile(this, p)) + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) - def span(p: A => Boolean): (C^{this, p}, C^{this, p}) = (takeWhile(p), dropWhile(p)) + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) - def drop(n: Int): C^{this} = fromSpecific(new View.Drop(this, n)) + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) /** Selects all elements except last ''n'' ones. * $orderDependent @@ -468,9 +464,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def dropRight(n: Int): C^{this} = fromSpecific(new View.DropRight(this, n)) + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) - def dropWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.DropWhile(this, p)) + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) /** Partitions elements in fixed size ${coll}s. 
* @see [[scala.collection.Iterator]], method `grouped` @@ -479,7 +475,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return An iterator producing ${coll}s of size `size`, except the * last will be less than size `size` if the elements don't divide evenly. */ - def grouped(size: Int): Iterator[C^{this}]^{this} = + def grouped(size: Int): Iterator[C] = iterator.grouped(size).map(fromSpecific) /** Groups elements in fixed size blocks by passing a "sliding window" @@ -501,7 +497,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` */ - def sliding(size: Int): Iterator[C^{this}]^{this} = sliding(size, 1) + def sliding(size: Int): Iterator[C] = sliding(size, 1) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -520,13 +516,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * element (which may be the only element) will be smaller * if there are fewer than `size` elements remaining to be grouped. * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` - * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` */ - def sliding(size: Int, step: Int): Iterator[C^{this}]^{this} = + def sliding(size: Int, step: Int): Iterator[C] = iterator.sliding(size, step).map(fromSpecific) /** The rest of the collection without its first element. 
*/ - def tail: C^{this} = { + def tail: C = { if (isEmpty) throw new UnsupportedOperationException drop(1) } @@ -534,12 +530,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** The initial part of the collection without its last element. * $willForceEvaluation */ - def init: C^{this} = { + def init: C = { if (isEmpty) throw new UnsupportedOperationException dropRight(1) } - def slice(from: Int, until: Int): C^{this} = + def slice(from: Int, until: Int): C = fromSpecific(new View.Drop(new View.Take(this, until), from)) /** Partitions this $coll into a map of ${coll}s according to some discriminator function. @@ -649,9 +645,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[B >: A](z: B)(op: (B, B) => B): CC[B]^{this, op} = scanLeft(z)(op) + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) - def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} = iterableFactory.from(new View.ScanLeft(this, z, op)) + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) /** Produces a collection containing cumulative results of applying the operator going right to left. * The head of the collection is the last cumulative result. 
@@ -669,7 +665,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanRight[B](z: B)(op: (A, B) => B): CC[B]^{this, op} = { + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { class Scanner extends runtime.AbstractFunction1[A, Unit] { var acc = z var scanned = acc :: immutable.Nil @@ -683,13 +679,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable iterableFactory.from(scanner.scanned) } - def map[B](f: A => B): CC[B]^{this, f} = iterableFactory.from(new View.Map(this, f)) + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = iterableFactory.from(new View.FlatMap(this, f)) + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) - def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} = flatMap(asIterable) + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) - def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} = + def collect[B](pf: PartialFunction[A, B]): CC[B] = iterableFactory.from(new View.Collect(this, pf)) /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one @@ -710,12 +706,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @tparam A2 the element type of the second resulting collection * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] * - * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in 
[[scala.util.Right]]. */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1]^{this, f}, CC[A2]^{this, f}) = { - val left: View[A1]^{f, this} = new LeftPartitionMapped(this, f) - val right: View[A2]^{f, this} = new RightPartitionMapped(this, f) + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) (iterableFactory.from(left), iterableFactory.from(right)) } @@ -728,13 +724,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = iterableFactory.from(suffix match { + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { case xs: Iterable[B] => new View.Concat(this, xs) case xs => iterator ++ suffix.iterator }) /** Alias for `concat` */ - @`inline` final def ++ [B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = concat(suffix) + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -745,12 +741,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. * The length of the returned collection is the minimum of the lengths of this $coll and `that`. 
*/ - def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)]^{this, that} = iterableFactory.from(that match { // sound bcs of VarianceNote + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) - def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} = iterableFactory.from(new View.ZipWithIndex(this)) + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -766,7 +762,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B]^, thisElem: A1, thatElem: B): CC[(A1, B)]^{this, that} = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) /** Converts this $coll of pairs into two collections of the first and second * half of each pair. @@ -787,9 +783,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a pair of ${coll}s, containing the first, respectively second * half of each element pair of this $coll. 
*/ - def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1]^{this}, CC[A2]^{this}) = { - val first: View[A1]^{this} = new View.Map[A, A1](this, asPair(_)._1) - val second: View[A2]^{this} = new View.Map[A, A2](this, asPair(_)._2) + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) (iterableFactory.from(first), iterableFactory.from(second)) } @@ -814,10 +810,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a triple of ${coll}s, containing the first, second, respectively * third member of each element triple of this $coll. */ - def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1]^{this}, CC[A2]^{this}, CC[A3]^{this}) = { - val first: View[A1]^{this} = new View.Map[A, A1](this, asTriple(_)._1) - val second: View[A2]^{this} = new View.Map[A, A2](this, asTriple(_)._2) - val third: View[A3]^{this} = new View.Map[A, A3](this, asTriple(_)._3) + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) } @@ -828,7 +824,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the tails of this $coll * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` */ - def tails: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.tail) + def tails: Iterator[C] = iterateUntilEmpty(_.tail) /** Iterates over the inits of this $coll. 
The first value will be this * $coll and the final one will be an empty $coll, with the intervening @@ -839,24 +835,21 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the inits of this $coll * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` */ - def inits: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.init) + def inits: Iterator[C] = iterateUntilEmpty(_.init) - override def tapEach[U](f: A => U): C^{this, f} = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Iterable[A]^{this} => Iterable[A]^{this}): Iterator[C^{this}]^{this, f} = { + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` // `this.tail.tail` doesn't compile as `C` is unbounded // `Iterable.from(this)` would eagerly copy non-immutable collections - val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f) - .takeWhile((itble: Iterable[A]^) => itble.iterator.nonEmpty) - // CC TODO type annotation for itble needed. - // The previous code `.takeWhile(_.iterator.nonEmpty)` does not work. 
+ val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++:[B >: A](that: IterableOnce[B]^): CC[B]^{this, that} = iterableFactory.from(that match { + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { case xs: Iterable[B] => new View.Concat(xs, this) case _ => that.iterator ++ iterator }) @@ -869,8 +862,7 @@ object IterableOps { * These operations are implemented in terms of * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. */ - final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]^) extends AnyVal { - this: SizeCompareOps^{it} => + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { /** Tests if the size of the collection is less than some value. */ @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 /** Tests if the size of the collection is less than or equal to some value. 
*/ @@ -895,22 +887,22 @@ object IterableOps { */ @SerialVersionUID(3L) class WithFilter[+A, +CC[_]]( - self: IterableOps[A, CC, _]^, + self: IterableOps[A, CC, _], p: A => Boolean ) extends collection.WithFilter[A, CC] with Serializable { - protected def filtered: Iterable[A]^{this} = + protected def filtered: Iterable[A] = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B]^{this} = + def map[B](f: A => B): CC[B] = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} = + def flatMap[B](f: A => IterableOnce[B]): CC[B] = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} = + def withFilter(q: A => Boolean): WithFilter[A, CC] = new WithFilter(self, (a: A) => p(a) && q(a)) } @@ -948,7 +940,7 @@ abstract class AbstractIterable[+A] extends Iterable[A] * same as `C`. */ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = iterableFactory.from(coll) + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] // overridden for efficiency, since we know CC[A] =:= C @@ -966,7 +958,7 @@ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends I trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] implicit protected def iterableEvidence: Ev[A @uncheckedVariance] - override protected def fromSpecific(coll: IterableOnce[A 
@uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = evidenceIterableFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty } @@ -988,11 +980,11 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) - override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) } @@ -1012,8 +1004,7 @@ trait SortedSetFactoryDefaults[+A, trait MapFactoryDefaults[K, +V, +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { - this: MapFactoryDefaults[K, V, CC, WithFilterCC] => - override protected def fromSpecific(coll: IterableOnce[(K, V 
@uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = mapFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) @@ -1021,7 +1012,7 @@ trait MapFactoryDefaults[K, +V, case _ => mapFactory.empty } - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC]^{p} = + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) } @@ -1044,9 +1035,9 @@ trait SortedMapFactoryDefaults[K, +V, self: IterableOps[(K, V), WithFilterCC, _] => override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) - override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) } diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala 
b/tests/pos-special/stdlib/collection/IterableOnce.scala index 6836a3bac39a..65d8dce08ae4 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -14,13 +14,12 @@ package scala package collection import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.StringBuilder import scala.language.implicitConversions import scala.math.{Numeric, Ordering} import scala.reflect.ClassTag import scala.runtime.AbstractFunction2 -import language.experimental.captureChecking /** * A template trait for collections which can be traversed either once only @@ -43,10 +42,8 @@ import language.experimental.captureChecking * @define coll collection */ trait IterableOnce[+A] extends Any { - this: IterableOnce[A]^ => - /** Iterator can be used only once */ - def iterator: Iterator[A]^{this} + def iterator: Iterator[A] /** Returns a [[scala.collection.Stepper]] for the elements of this collection. * @@ -68,9 +65,9 @@ trait IterableOnce[+A] extends Any { * allow creating parallel streams, whereas bare Steppers can be converted only to sequential * streams. */ - def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = { + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { import convert.impl._ - val s: Any = shape.shape match { + val s = shape.shape match { case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) @@ -87,7 +84,7 @@ trait IterableOnce[+A] extends Any { final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { @deprecated("Use .iterator.withFilter(...) 
instead", "2.13.0") - def withFilter(f: A => Boolean): Iterator[A]^{f} = it.iterator.withFilter(f) + def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) @deprecated("Use .iterator.reduceLeftOption(...) instead", "2.13.0") def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) @@ -105,7 +102,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") - def maxBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) @@ -123,7 +120,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) @deprecated("Use .iterator.minBy(...) instead", "2.13.0") - def minBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) @deprecated("Use .iterator.size instead", "2.13.0") def size: Int = it.iterator.size @@ -135,7 +132,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) @deprecated("Use .iterator.filter(...) instead", "2.13.0") - def filter(f: A => Boolean): Iterator[A]^{f} = it.iterator.filter(f) + def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) @deprecated("Use .iterator.exists(...) 
instead", "2.13.0") def exists(f: A => Boolean): Boolean = it.iterator.exists(f) @@ -241,13 +238,13 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") - def map[B](f: A => B): IterableOnce[B]^{f} = it match { + def map[B](f: A => B): IterableOnce[B] = it match { case it: Iterable[A] => it.map(f) case _ => it.iterator.map(f) } @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") - def flatMap[B](f: A => IterableOnce[B]^): IterableOnce[B]^{f} = it match { + def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { case it: Iterable[A] => it.flatMap(f) case _ => it.iterator.flatMap(f) } @@ -318,11 +315,9 @@ object IterableOnce { * @define coll collection * */ -trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => /////////////////////////////////////////////////////////////// Abstract methods that must be implemented - import IterableOnceOps.Maximized - /** Produces a $coll containing cumulative results of applying the * operator going left to right, including the initial value. * @@ -334,7 +329,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] /** Selects all elements of this $coll which satisfy a predicate. * @@ -342,7 +337,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. 
*/ - def filter(p: A => Boolean): C^{this, p} + def filter(p: A => Boolean): C /** Selects all elements of this $coll which do not satisfy a predicate. * @@ -350,7 +345,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll consisting of all elements of this $coll that do not satisfy the given * predicate `pred`. Their order may not be preserved. */ - def filterNot(p: A => Boolean): C^{this, p} + def filterNot(pred: A => Boolean): C /** Selects the first ''n'' elements. * $orderDependent @@ -359,7 +354,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def take(n: Int): C^{this} + def take(n: Int): C /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -367,7 +362,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C^{this, p} + def takeWhile(p: A => Boolean): C /** Selects all elements except first ''n'' ones. * $orderDependent @@ -376,7 +371,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def drop(n: Int): C^{this} + def drop(n: Int): C /** Drops longest prefix of elements that satisfy a predicate. * $orderDependent @@ -384,7 +379,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the longest suffix of this $coll whose first element * does not satisfy the predicate `p`. */ - def dropWhile(p: A => Boolean): C^{this, p} + def dropWhile(p: A => Boolean): C /** Selects an interval of elements. 
The returned $coll is made up * of all elements `x` which satisfy the invariant: @@ -399,7 +394,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * index `from` extending up to (but not including) index `until` * of this $coll. */ - def slice(from: Int, until: Int): C^{this} + def slice(from: Int, until: Int): C /** Builds a new $coll by applying a function to all elements of this $coll. * @@ -408,7 +403,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[B](f: A => B): CC[B]^{this, f} + def map[B](f: A => B): CC[B] /** Builds a new $coll by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -441,7 +436,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} + def flatMap[B](f: A => IterableOnce[B]): CC[B] /** Converts this $coll of iterable collections into * a $coll formed by the elements of these iterable @@ -469,7 +464,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * type of this $coll is an `Iterable`. * @return a new $coll resulting from concatenating all element ${coll}s. */ - def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] /** Builds a new $coll by applying a partial function to all elements of this $coll * on which the function is defined. @@ -480,7 +475,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * `pf` to each element on which it is defined and collecting the results. 
* The order of the elements is preserved. */ - def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} + def collect[B](pf: PartialFunction[A, B]): CC[B] /** Zips this $coll with its indices. * @@ -489,7 +484,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @example * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` */ - def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} + def zipWithIndex: CC[(A @uncheckedVariance, Int)] /** Splits this $coll into a prefix/suffix pair according to a predicate. * @@ -502,7 +497,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a pair consisting of the longest prefix of this $coll whose * elements all satisfy `p`, and the rest of this $coll. */ - def span(p: A => Boolean): (C^{this, p}, C^{this, p}) + def span(p: A => Boolean): (C, C) /** Splits this $coll into a prefix/suffix pair at a given position. * @@ -514,7 +509,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return a pair of ${coll}s consisting of the first `n` * elements of this $coll, and the other elements. 
*/ - def splitAt(n: Int): (C^{this}, C^{this}) = { + def splitAt(n: Int): (C, C) = { class Spanner extends runtime.AbstractFunction1[A, Boolean] { var i = 0 def apply(a: A) = i < n && { i += 1 ; true } @@ -532,7 +527,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @tparam U the return type of f * @return The same logical collection as this */ - def tapEach[U](f: A => U): C^{this, f} + def tapEach[U](f: A => U): C /////////////////////////////////////////////////////////////// Concrete methods based on iterator @@ -807,7 +802,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => case _ => Some(reduceLeft(op)) } private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) - private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X]^)(op: (B, X) => B): Option[B] = { + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { if (it.hasNext) { var acc: B = it.next() while (it.hasNext) @@ -1046,12 +1041,35 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the first element of this $coll with the largest value measured by function f * with respect to the ordering `cmp`. 
*/ - def maxBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.maxBy") case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result } + private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X = null.asInstanceOf[X] + var maxF: B = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } + /** Finds the first element which yields the largest value measured by function f. * * $willNotTerminateInf @@ -1062,7 +1080,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return an option value containing the first element of this $coll with the * largest value measured by function f with respect to the ordering `cmp`. */ - def maxByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption @@ -1079,7 +1097,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * @return the first element of this $coll with the smallest value measured by function f * with respect to the ordering `cmp`. 
*/ - def minBy[B](f: A -> B)(implicit ord: Ordering[B]): A = + def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.minBy") case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result @@ -1096,7 +1114,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => * with the smallest value measured by function f * with respect to the ordering `cmp`. */ - def minByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = + def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption @@ -1292,7 +1310,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) @deprecated("Use .iterator instead of .toIterator", "2.13.0") - @`inline` final def toIterator: Iterator[A]^{this} = iterator + @`inline` final def toIterator: Iterator[A] = iterator def toList: immutable.List[A] = immutable.List.from(this) @@ -1334,31 +1352,3 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => xs } } - -object IterableOnceOps: - - // Moved out of trait IterableOnceOps to here, since universal traits cannot - // have nested classes in Scala 3 - private class Maximized[X, B](descriptor: String)(f: X -> B)(cmp: (B, B) -> Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { - var maxElem: X @uncheckedCaptures = null.asInstanceOf[X] - var maxF: B @uncheckedCaptures = null.asInstanceOf[B] - var nonEmpty = false - def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None - def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") - def apply(m: Maximized[X, B], a: X): Maximized[X, B] = - if (m.nonEmpty) { - val fa = f(a) - if (cmp(fa, maxF)) { - maxF = fa - 
maxElem = a - } - m - } - else { - m.nonEmpty = true - m.maxElem = a - m.maxF = f(a) - m - } - } -end IterableOnceOps \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index ecd8d985bbf0..4b8338ed1b17 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -14,11 +14,8 @@ package scala.collection import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} import scala.annotation.tailrec -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure - /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking @@ -74,8 +71,7 @@ import caps.unsafe.unsafeAssumePure * iterators as well. * @define coll iterator */ -trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { - self: Iterator[A]^ => +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { self => /** Check if there is a next element available. * @@ -97,7 +93,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @throws[NoSuchElementException] def next(): A - @inline final def iterator: Iterator[A]^{this} = this + @inline final def iterator = this /** Wraps the value of `next()` in an option. * @@ -121,7 +117,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return a buffered iterator producing the same values as this iterator. 
* @note Reuse: $consumesAndProducesIterator */ - def buffered: BufferedIterator[A]^{this} = new AbstractIterator[A] with BufferedIterator[A] { + def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -157,16 +153,16 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * A `GroupedIterator` is yielded by `grouped` and by `sliding`, * where the `step` may differ from the group `size`. */ - class GroupedIterator[B >: A](self: Iterator[B]^, size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: Array[B @uncheckedCaptures] = null // current result - private[this] var prev: Array[B @uncheckedCaptures] = null // if sliding, overlap from previous result + private[this] var buffer: Array[B] = null // current result + private[this] var prev: Array[B] = null // if sliding, overlap from previous result private[this] var first = true // if !first, advancing may skip ahead private[this] var filled = false // whether the buffer is "hot" private[this] var partial = true // whether to emit partial sequence - private[this] var padding: () -> B @uncheckedCaptures = null // what to pad short sequences with + private[this] var padding: () => B = null // what to pad short sequences with private[this] def pad = padding != null // irrespective of partial flag private[this] def newBuilder = { val b = ArrayBuilder.make[Any] @@ -189,7 +185,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial`. 
* @group Configuration */ - def withPadding(x: -> B): this.type = { + def withPadding(x: => B): this.type = { padding = () => x partial = true // redundant, as padding always results in complete segment this @@ -295,7 +291,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * all elements of this $coll followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A](len: Int, elem: B): Iterator[B]^{this} = new AbstractIterator[B] { + def padTo[B >: A](len: Int, elem: B): Iterator[B] = new AbstractIterator[B] { private[this] var i = 0 override def knownSize: Int = { @@ -325,7 +321,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * is the same as in the original iterator. * @note Reuse: $consumesOneAndProducesTwoIterators */ - def partition(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { + def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { val (a, b) = duplicate (a filter p, b filterNot p) } @@ -345,7 +341,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def grouped[B >: A](size: Int): GroupedIterator[B]^{this} = + def grouped[B >: A](size: Int): GroupedIterator[B] = new GroupedIterator[B](self, size, size) /** Returns an iterator which presents a "sliding window" view of @@ -381,13 +377,13 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B]^{this} = + def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = new GroupedIterator[B](self, size, step) - def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B]^{this, op} = new AbstractIterator[B] { + def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { // We use an 
intermediate iterator that iterates through the first element `z` // and then that will be modified to iterate through the collection - private[this] var current: Iterator[B]^{self, op} = + private[this] var current: Iterator[B] = new AbstractIterator[B] { override def knownSize = { val thisSize = self.knownSize @@ -416,7 +412,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } @deprecated("Call scanRight on an Iterable instead.", "2.13.0") - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = ArrayBuffer.from(this).scanRight(z)(op).iterator def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) @@ -469,11 +465,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0") override def isEmpty: Boolean = !hasNext - def filter(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = false) + def filter(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = false) - def filterNot(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = true) + def filterNot(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = true) - private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A] = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -483,9 +479,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite while (p(hd) == isFlipped) { if (!self.hasNext) return false hd = self.next() - } + } hdDefined = true - true + true } def next() = @@ -507,9 +503,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, 
Ite * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. * @note Reuse: $consumesAndProducesIterator */ - def withFilter(p: A => Boolean): Iterator[A]^{this, p} = filter(p) + def withFilter(p: A => Boolean): Iterator[A] = filter(p) - def collect[B](pf: PartialFunction[A, B]^): Iterator[B]^{this, pf} = new AbstractIterator[B] with (A -> B) { + def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ @@ -545,7 +541,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinct: Iterator[A]^{this} = distinctBy(identity) + def distinct: Iterator[A] = distinctBy(identity) /** * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying @@ -557,7 +553,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { + def distinctBy[B](f: A => B): Iterator[A] = new AbstractIterator[A] { private[this] val traversedValues = mutable.HashSet.empty[B] private[this] var nextElementDefined: Boolean = false @@ -582,14 +578,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def map[B](f: A => B): Iterator[B]^{this, f} = new AbstractIterator[B] { + def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { override def knownSize = self.knownSize def hasNext = self.hasNext def next() = f(self.next()) } - def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new AbstractIterator[B] { - private[this] var cur: Iterator[B]^{f} = Iterator.empty + def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new AbstractIterator[B] { + private[this] var cur: Iterator[B] = Iterator.empty /** Trillium logic 
boolean: -1 = unknown, 0 = false, 1 = true */ private[this] var _hasNext: Int = -1 @@ -623,19 +619,19 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def flatten[B](implicit ev: A -> IterableOnce[B]): Iterator[B]^{this} = + def flatten[B](implicit ev: A => IterableOnce[B]): Iterator[B] = flatMap[B](ev) - def concat[B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator.ConcatIterator[B](self).concat(xs) + def concat[B >: A](xs: => IterableOnce[B]): Iterator[B] = new Iterator.ConcatIterator[B](self).concat(xs) - @`inline` final def ++ [B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = concat(xs) + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]): Iterator[B] = concat(xs) - def take(n: Int): Iterator[A]^{this} = sliceIterator(0, n max 0) + def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) - def takeWhile(p: A => Boolean): Iterator[A]^{self, p} = new AbstractIterator[A] { + def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false - private[this] var tail: Iterator[A]^{self} = self + private[this] var tail: Iterator[A] = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() @@ -646,9 +642,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A]^{this} = sliceIterator(n, -1) + def drop(n: Int): Iterator[A] = sliceIterator(n, -1) - def dropWhile(p: A => Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { + def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator private[this] var status = -1 // Local buffering to avoid double-wrap with .buffered @@ -684,7 +680,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * 
* @note Reuse: $consumesOneAndProducesTwoIterators */ - def span(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { + def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing @@ -783,10 +779,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite (leading, trailing) } - def slice(from: Int, until: Int): Iterator[A]^{this} = sliceIterator(from, until max 0) + def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) /** Creates an optionally bounded slice, unbounded if `until` is negative. */ - protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { + protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 val rest = if (until < 0) -1 // unbounded @@ -797,14 +793,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite else new Iterator.SliceIterator(this, lo, rest) } - def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new AbstractIterator[(A, B)] { + def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { val thatIterator = that.iterator override def knownSize = self.knownSize min thatIterator.knownSize def hasNext = self.hasNext && thatIterator.hasNext def next() = (self.next(), thatIterator.next()) } - def zipAll[A1 >: A, B](that: IterableOnce[B]^, thisElem: A1, thatElem: B): Iterator[(A1, B)]^{this, that} = new AbstractIterator[(A1, B)] { + def zipAll[A1 >: A, B](that: IterableOnce[B], thisElem: A1, thatElem: B): Iterator[(A1, B)] = new AbstractIterator[(A1, B)] { val thatIterator = that.iterator override def knownSize = { val thisSize = self.knownSize @@ -821,7 +817,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def zipWithIndex: Iterator[(A, Int)]^{this} = new 
AbstractIterator[(A, Int)] { + def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { var idx = 0 override def knownSize = self.knownSize def hasNext = self.hasNext @@ -841,7 +837,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @inheritdoc */ - def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { val those = that.iterator while (hasNext && those.hasNext) if (next() != those.next()) @@ -864,7 +860,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * iterated by one iterator but not yet by the other. * @note Reuse: $consumesOneAndProducesTwoIterators */ - def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { + def duplicate: (Iterator[A], Iterator[A]) = { val gap = new scala.collection.mutable.Queue[A] var ahead: Iterator[A] = null class Partner extends AbstractIterator[A] { @@ -908,7 +904,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @param replaced The number of values in the original iterator that are replaced by the patch. 
* @note Reuse: $consumesTwoAndProducesOneIterator */ - def patch[B >: A](from: Int, patchElems: Iterator[B]^, replaced: Int): Iterator[B]^{this, patchElems} = + def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private[this] var origElems = self // > 0 => that many more elems from `origElems` before switching to `patchElems` @@ -948,7 +944,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - override def tapEach[U](f: A => U): Iterator[A]^{this, f} = new AbstractIterator[A] { + override def tapEach[U](f: A => U): Iterator[A] = new AbstractIterator[A] { override def knownSize = self.knownSize override def hasNext = self.hasNext override def next() = { @@ -985,7 +981,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - override def from[A](source: IterableOnce[A]^): Iterator[A]^{source} = source.iterator + override def from[A](source: IterableOnce[A]): Iterator[A] = source.iterator /** The iterator which produces no values. */ @`inline` final def empty[T]: Iterator[T] = _empty @@ -1016,7 +1012,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation * @return An iterator that produces the results of `n` evaluations of `elem`. */ - override def fill[A](len: Int)(elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { + override def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (len - i) max 0 def hasNext: Boolean = i < len @@ -1031,7 +1027,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f The function computing element values * @return An iterator that produces the values `f(0), ..., f(n -1)`. 
*/ - override def tabulate[A](end: Int)(f: Int => A): Iterator[A]^{f} = new AbstractIterator[A] { + override def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (end - i) max 0 def hasNext: Boolean = i < end @@ -1104,7 +1100,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f the function that's repeatedly applied * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[T](start: T)(f: T => T): Iterator[T]^{f} = new AbstractIterator[T] { + def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { private[this] var first = true private[this] var acc = start def hasNext: Boolean = true @@ -1126,7 +1122,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam S Type of the internal state * @return an Iterator that produces elements using `f` until `f` returns `None` */ - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A]^{f} = new UnfoldIterator(init)(f) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A] = new UnfoldIterator(init)(f) /** Creates an infinite-length iterator returning the results of evaluating an expression. * The expression is recomputed for every element. @@ -1134,7 +1130,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation. * @return the iterator containing an infinite number of results of evaluating `elem`. */ - def continually[A](elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { + def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { def hasNext = true def next() = elem } @@ -1142,12 +1138,9 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. 
*/ - private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { - private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure - // This should be Iteratpr[A]^, but fails since mutable variables can't capture cap. - // To do better we'd need to track nesting levels for universal capabiltities. - private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null - private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null + private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] { + private var tail: ConcatIteratorCell[A @uncheckedVariance] = null + private var last: ConcatIteratorCell[A @uncheckedVariance] = null private var currentHasNextChecked = false def hasNext = @@ -1201,8 +1194,8 @@ object Iterator extends IterableFactory[Iterator] { current.next() } else Iterator.empty.next() - override def concat[B >: A](that: => IterableOnce[B]^): Iterator[B]^{this, that} = { - val c: ConcatIteratorCell[A] = new ConcatIteratorCell[B](that, null).asInstanceOf + override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = { + val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] if (tail == null) { tail = c last = c @@ -1216,14 +1209,14 @@ object Iterator extends IterableFactory[Iterator] { } } - private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^, var tail: ConcatIteratorCell[A @uncheckedCaptures]) { - def headIterator: Iterator[A]^{this} = head.iterator // CC todo: can't use {head} as capture set, gives "cannot establish a reference" + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: ConcatIteratorCell[A]) { + def headIterator: Iterator[A] = head.iterator } /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. * Lazily skip to start on first evaluation. 
Avoids daisy-chained iterators due to slicing. */ - private[scala] final class SliceIterator[A](val underlying: Iterator[A]^, start: Int, limit: Int) extends AbstractIterator[A] { + private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { private[this] var remaining = limit private[this] var dropping = start @inline private def unbounded = remaining < 0 @@ -1254,7 +1247,7 @@ object Iterator extends IterableFactory[Iterator] { else if (unbounded) underlying.next() else empty.next() } - override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{underlying} = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { val lo = from max 0 def adjustedBound = if (unbounded) -1 @@ -1276,9 +1269,9 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator that uses a function `f` to produce elements of * type `A` and update an internal state of type `S`. */ - private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)])extends AbstractIterator[A] { - private[this] var state: S @uncheckedCaptures = init - private[this] var nextResult: Option[(A, S)] @uncheckedCaptures = null + private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] { + private[this] var state: S = init + private[this] var nextResult: Option[(A, S)] = null override def hasNext: Boolean = { if (nextResult eq null) { @@ -1304,5 +1297,4 @@ object Iterator extends IterableFactory[Iterator] { } /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractIterator[+A] extends Iterator[A]: - this: Iterator[A]^ => +abstract class AbstractIterator[+A] extends Iterator[A] diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala new file mode 100644 index 000000000000..569e4e8c60a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/JavaConverters.scala @@ -0,0 +1,335 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.collection.convert._ +import scala.language.implicitConversions + +/** A variety of decorators that enable converting between + * Scala and Java collections using extension methods, `asScala` and `asJava`. + * + * The extension methods return adapters for the corresponding API. 
+ * + * The following conversions are supported via `asScala` and `asJava`: + *{{{ + * scala.collection.Iterable <=> java.lang.Iterable + * scala.collection.Iterator <=> java.util.Iterator + * scala.collection.mutable.Buffer <=> java.util.List + * scala.collection.mutable.Set <=> java.util.Set + * scala.collection.mutable.Map <=> java.util.Map + * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap + *}}} + * The following conversions are supported via `asScala` and through + * specially-named extension methods to convert to Java collections, as shown: + *{{{ + * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection) + * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration) + * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary) + *}}} + * In addition, the following one-way conversions are provided via `asJava`: + *{{{ + * scala.collection.Seq => java.util.List + * scala.collection.mutable.Seq => java.util.List + * scala.collection.Set => java.util.Set + * scala.collection.Map => java.util.Map + *}}} + * The following one way conversion is provided via `asScala`: + *{{{ + * java.util.Properties => scala.collection.mutable.Map + *}}} + * In all cases, converting from a source type to a target type and back + * again will return the original source object. For example: + * {{{ + * import scala.collection.JavaConverters._ + * + * val source = new scala.collection.mutable.ListBuffer[Int] + * val target: java.util.List[Int] = source.asJava + * val other: scala.collection.mutable.Buffer[Int] = target.asScala + * assert(source eq other) + * }}} + * Alternatively, the conversion methods have descriptive names and can be invoked explicitly. 
+ * {{{ + * scala> val vs = java.util.Arrays.asList("hi", "bye") + * vs: java.util.List[String] = [hi, bye] + * + * scala> val ss = asScalaIterator(vs.iterator) + * ss: Iterator[String] = + * + * scala> .toList + * res0: List[String] = List(hi, bye) + * + * scala> val ss = asScalaBuffer(vs) + * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye) + * }}} + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object JavaConverters extends AsJavaConverters with AsScalaConverters { + @deprecated("Use `asJava` instead", "2.13.0") + def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i) + + @deprecated("Use `asJava` instead", "2.13.0") + def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i) + + @deprecated("Use `asJava` instead", "2.13.0") + def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s) + + @deprecated("Use `asJava` instead", "2.13.0") + def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m) + + @deprecated("Use `asJava` instead", "2.13.0") + def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m) + + @deprecated("Use `asJava` instead", "2.13.0") + def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m) + + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i) + + @deprecated("Use 
`asScala` instead", "2.13.0") + def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l) + + @deprecated("Use `asScala` instead", "2.13.0") + def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m) + + @deprecated("Use `asScala` instead", "2.13.0") + def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p) + + @deprecated("Use `asScala` instead", "2.13.0") + def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p) + + // Deprecated implicit conversions for code that directly imports them + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`. + * @see [[asJavaIterator]] + */ + implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = + new AsJava(asJavaIterator(i)) + + /** + * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`. + * @see [[asJavaEnumeration]] + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[asJavaIterable]] + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`. 
+ * @see [[asJavaCollection]] + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[bufferAsJavaList]] + */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[mutableSeqAsJavaList]] + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`. + * @see [[seqAsJavaList]] + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[mutableSetAsJavaSet]] + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`. + * @see [[setAsJavaSet]] + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[mutableMapAsJavaMap]] + */ + implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`. 
+ * @see [[asJavaDictionary]] + */ + implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`. + * @see [[mapAsJavaMap]] + */ + implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * @see [[mapAsJavaConcurrentMap]]. + */ + implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] = + new AsJava(mapAsJavaConcurrentMap(m)) + + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[asScalaIterator]] + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`. + * @see [[enumerationAsScalaIterator]] + */ + implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = + new AsScala(enumerationAsScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`. + * @see [[iterableAsScalaIterable]] + */ + implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = + new AsScala(iterableAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Collection` to an Scala `Iterable`. + * @see [[collectionAsScalaIterable]] + */ + implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = + new AsScala(collectionAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`. 
+ * @see [[asScalaBuffer]] + */ + implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = + new AsScala(asScalaBuffer(l)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`. + * @see [[asScalaSet]] + */ + implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = + new AsScala(asScalaSet(s)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`. + * @see [[mapAsScalaMap]] + */ + implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(mapAsScalaMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`. + * @see [[mapAsScalaConcurrentMap]] + */ + implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] = + new AsScala(mapAsScalaConcurrentMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`. + * @see [[dictionaryAsScalaMap]] + */ + implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] = + new AsScala(dictionaryAsScalaMap(p)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`. 
+ * @see [[propertiesAsScalaMap]] + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) + + + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[K, V](m : mutable.Map[K, V]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m) + } +} diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala new file mode 100644 index 000000000000..0553eb8edf7f --- /dev/null +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -0,0 +1,422 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.language.implicitConversions + +/** Decorator representing lazily zipped pairs. + * + * @define coll pair + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1], coll2: Iterable[El2]) { + + /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. + * + * @param that the iterable providing the third element of each eventual triple + * @tparam B the type of the third element in each eventual triple + * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or + * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip3[El1, El2, B, C1] = new LazyZip3(src, coll1, coll2, that) + + def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = f(elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext) + _current = f(elems1.next(), elems2.next()).iterator + _current 
+ } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty + }) + } + + def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2)] { + def iterator = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] var _current: (El1, El2) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + if (p(e1, e2)) _current = (e1, e2) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.hasNext + }) + } + + def exists(p: (El1, El2) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next()) + + res + } + + def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2)) + + def foreach[U](f: (El1, El2) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + + while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next()) + } + + private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] { + def iterator = new AbstractIterator[(El1, El2)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + def hasNext = elems1.hasNext && elems2.hasNext + def next() = (elems1.next(), elems2.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean 
= coll1.isEmpty || coll2.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else s1 min s2 + } + } + + override def toString = s"$coll1.lazyZip($coll2)" +} + +object LazyZip2 { + implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable +} + + +/** Decorator representing lazily zipped triples. + * + * @define coll triple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3]) { + + /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are + * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. + * + * @param that the iterable providing the fourth element of each eventual 4-tuple + * @tparam B the type of the fourth element in each eventual 4-tuple + * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. + * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. 
+ */ + def lazyZip[B](that: Iterable[B]): LazyZip4[El1, El2, El3, B, C1] = new LazyZip4(src, coll1, coll2, coll3, that) + + def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] var _current: (El1, El2, El3) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && 
elems2.hasNext && elems3.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + if (p(e1, e2, e3)) _current = (e1, e2, e3) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next()) + + res + } + + def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3)) + + def foreach[U](f: (El1, El2, El3) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + + while (elems1.hasNext && elems2.hasNext && elems3.hasNext) + f(elems1.next(), elems2.next(), elems3.next()) + } + + private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] { + def iterator = new AbstractIterator[(El1, El2, El3)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else s1 min s2 min s3 + } + } + } + + override def toString = 
s"$coll1.lazyZip($coll2).lazyZip($coll3)" +} + +object LazyZip3 { + implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable +} + + + +/** Decorator representing lazily zipped 4-tuples. + * + * @define coll tuple + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + */ +final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, + coll1: Iterable[El1], + coll2: Iterable[El2], + coll3: Iterable[El3], + coll4: Iterable[El4]) { + + def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + }) + } + + def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = { + bf.fromSpecific(src)(new AbstractView[B] { + def iterator = new AbstractIterator[B] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: Iterator[B] = Iterator.empty + private def current = { + while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator + _current + } + def hasNext = current.hasNext + def next() = current.next() + } + override def knownSize: Int = if 
(coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = { + bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + private[this] var _current: (El1, El2, El3, El4) = _ + private def current = { + while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) { + val e1 = elems1.next() + val e2 = elems2.next() + val e3 = elems3.next() + val e4 = elems4.next() + if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4) + } + _current + } + def hasNext = current ne null + def next() = { + val c = current + if (c ne null) { + _current = null + c + } else Iterator.empty.next() + } + } + override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + }) + } + + def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + var res = false + + while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + + res + } + + def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4)) + + def foreach[U](f: (El1, El2, El3, El4) => U): Unit = { + val elems1 = coll1.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator + val elems4 = coll4.iterator + + while 
(elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) + f(elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + + private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] { + def iterator = new AbstractIterator[(El1, El2, El3, El4)] { + private[this] val elems1 = coll1.iterator + private[this] val elems2 = coll2.iterator + private[this] val elems3 = coll3.iterator + private[this] val elems4 = coll4.iterator + def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext + def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next()) + } + override def knownSize: Int = zipKnownSize + override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty + } + + private def zipKnownSize: Int = { + val s1 = coll1.knownSize + if (s1 == 0) 0 else { + val s2 = coll2.knownSize + if (s2 == 0) 0 else { + val s3 = coll3.knownSize + if (s3 == 0) 0 else { + val s4 = coll4.knownSize + if (s4 == 0) 0 else s1 min s2 min s3 min s4 + } + } + } + } + + override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)" +} + +object LazyZip4 { + implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] = + zipped4.toIterable +} diff --git a/tests/pos-special/stdlib/collection/LinearSeq.scala b/tests/pos-special/stdlib/collection/LinearSeq.scala index 393f5fda4187..449d58c866e3 100644 --- a/tests/pos-special/stdlib/collection/LinearSeq.scala +++ b/tests/pos-special/stdlib/collection/LinearSeq.scala @@ -14,7 +14,6 @@ package scala package collection import scala.annotation.{nowarn, tailrec} -import language.experimental.captureChecking /** Base trait for linearly accessed sequences that have efficient `head` and * `tail` operations. 
@@ -33,7 +32,7 @@ trait LinearSeq[+A] extends Seq[A] object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) /** Base trait for linear Seq operations */ -trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends AnyRef with SeqOps[A, CC, C] { +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { /** @inheritdoc * @@ -97,7 +96,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq else loop(0, coll) } - override def lengthCompare(that: Iterable[_]^): Int = { + override def lengthCompare(that: Iterable[_]): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this lengthCompare thatKnownSize @@ -187,7 +186,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq acc } - override def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = (a eq b) || { if (a.nonEmpty && b.nonEmpty && a.head == b.head) { @@ -260,7 +259,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq } } -trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends AnyRef with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { private[this] var current = StrictOptimizedLinearSeqOps.this diff --git a/tests/pos-special/stdlib/collection/Map.scala 
b/tests/pos-special/stdlib/collection/Map.scala index ef4f915ea573..0fb6df9a06dc 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -17,7 +17,6 @@ import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 -import language.experimental.captureChecking /** Base Map type */ trait Map[K, +V] @@ -132,7 +131,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] /** Similar to `fromIterable`, but returns a Map collection type. * Note that the return type is now `CC[K2, V2]`. */ - @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]^): CC[K2, V2] = mapFactory.from(it) + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): CC[K2, V2] = mapFactory.from(it) /** The companion object of this map, providing various factory methods. * @@ -319,7 +318,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -329,7 +328,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. 
*/ - def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): CC[K, V2] = mapFactory.from(suffix match { + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -337,7 +336,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] // Not final because subclasses refine the result type, e.g. in SortedMap, the result type is // SortedMap's CC, while Map's CC is fixed to Map /** Alias for `concat` */ - /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) @@ -351,14 +350,14 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") - @`inline` def -- (keys: IterableOnce[K]^): C = { + @`inline` def -- (keys: IterableOnce[K]): C = { lazy val keysSet = keys.iterator.to(immutable.Set) fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++: [V1 >: V](that: IterableOnce[(K,V1)]^): CC[K,V1] = { - val thatIterable: Iterable[(K, V1)]^{that} = that match { + def ++: [V1 >: V](that: IterableOnce[(K,V1)]): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)] = that match { case that: Iterable[(K, V1)] => that case that => View.from(that) } @@ -381,10 +380,10 @@ object MapOps { def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, 
V2)]^): CC[K2, V2] = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala new file mode 100644 index 000000000000..7f84178a7c16 --- /dev/null +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -0,0 +1,187 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.nowarn +import scala.collection.MapView.SomeMapOps +import scala.collection.mutable.Builder + +trait MapView[K, +V] + extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] + with View[(K, V)] { + + override def view: MapView[K, V] = this + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all keys of this map. + * + * @return the keys of this map as a view. + */ + override def keys: Iterable[K] = new MapView.Keys(this) + + // Ideally this returns a `View`, but bincompat + /** Creates a view over all values of this map. + * + * @return the values of this map as a view. + */ + override def values: Iterable[V] = new MapView.Values(this) + + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. 
The resulting map wraps the original map without copying any elements. + */ + override def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + + /** Transforms this map by applying a function to every retrieved value. + * @param f the function used to transform values of this map. + * @return a map view which maps every key of this map + * to `f(this(key))`. The resulting map wraps the original map without copying any elements. + */ + override def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + + override def filter(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, false, pred) + + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, true, pred) + + override def partition(p: ((K, V)) => Boolean): (MapView[K, V], MapView[K, V]) = (filter(p), filterNot(p)) + + override def tapEach[U](f: ((K, V)) => U): MapView[K, V] = new MapView.TapEach(this, f) + + def mapFactory: MapViewFactory = MapView + + override def empty: MapView[K, V] = mapFactory.empty + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l] = new MapOps.WithFilter(this, p) + + override def toString: String = super[View].toString + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "MapView" +} + +object MapView extends MapViewFactory { + + /** An `IterableOps` whose collection type and collection type constructor are unknown */ + type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _] + /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */ + type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] + + @SerialVersionUID(3L) + private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + override def get(key: Any): Option[Nothing] = None + override def iterator: Iterator[Nothing] 
= Iterator.empty[Nothing] + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this + override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this + override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this + override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this) + } + + @SerialVersionUID(3L) + class Id[K, +V](underlying: SomeMapOps[K, V]) extends AbstractMapView[K, V] { + def get(key: K): Option[V] = underlying.get(key) + def iterator: Iterator[(K, V)] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Keys[K](underlying: SomeMapOps[K, _]) extends AbstractView[K] { + def iterator: Iterator[K] = underlying.keysIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + // Ideally this is public, but bincompat + @SerialVersionUID(3L) + private class Values[+V](underlying: SomeMapOps[_, V]) extends AbstractView[V] { + def iterator: Iterator[V] = underlying.valuesIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V], f: V => W) extends AbstractMapView[K, W] { + def iterator: Iterator[(K, W)] = underlying.iterator.map(kv => (kv._1, f(kv._2))) + def get(key: K): Option[W] = underlying.get(key).map(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class FilterKeys[K, +V](underlying: SomeMapOps[K, V], p: K => Boolean) extends 
AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filter { case (k, _) => p(k) } + def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class Filter[K, +V](underlying: SomeMapOps[K, V], isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)] = underlying.iterator.filterImpl(p, isFlipped) + def get(key: K): Option[V] = underlying.get(key) match { + case s @ Some(v) if p((key, v)) != isFlipped => s + case _ => None + } + override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize + override def isEmpty: Boolean = iterator.isEmpty + } + + @SerialVersionUID(3L) + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V], f: ((K, V)) => U) extends AbstractMapView[K, V] { + override def get(key: K): Option[V] = { + underlying.get(key) match { + case s @ Some(v) => + f((key, v)) + s + case None => None + } + } + override def iterator: Iterator[(K, V)] = underlying.iterator.tapEach(f) + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + + override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] + + override def from[K, V](it: IterableOnce[(K, V)]): View[(K, V)] = View.from(it) + + override def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] = it match { + case mv: MapView[K, V] => mv + case other => new MapView.Id(other) + } + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] { + + def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] + + def empty[X, Y]: MapView[X, Y] + 
+ def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] + + override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) +} + +/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */ +@SerialVersionUID(3L) +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V] + diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala new file mode 100644 index 000000000000..874a06449aa9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.language.implicitConversions +import scala.collection.generic.IsSeq + +object Searching { + + /** The result of performing a search on a sorted sequence + * + * Example usage: + * + * {{{ + * val list = List(1, 3, 4, 5) // list must be sorted before searching + * list.search(4) // Found(2) + * list.search(2) // InsertionPoint(1) + * }}} + * + * */ + sealed abstract class SearchResult { + /** The index corresponding to the element searched for in the sequence, if it was found, + * or the index where the element would be inserted in the sequence, if it was not in the sequence */ + def insertionPoint: Int + } + + /** The result of performing a search on a sorted sequence, where the element was found. 
+ * + * @param foundIndex the index corresponding to the element searched for in the sequence + */ + case class Found(foundIndex: Int) extends SearchResult { + override def insertionPoint: Int = foundIndex + } + + /** The result of performing a search on a sorted sequence, where the element was not found + * + * @param insertionPoint the index where the element would be inserted in the sequence + */ + case class InsertionPoint(insertionPoint: Int) extends SearchResult + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal + + @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0") + implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] = + new SearchImpl(fr.conversion(coll)) +} diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index caabf6fa6436..d960838fdcb7 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -16,8 +16,6 @@ import scala.collection.immutable.Range import scala.util.hashing.MurmurHash3 import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn -import language.experimental.captureChecking -import caps.unsafe.unsafeAssumePure /** Base trait for sequence collections * @@ -29,7 +27,6 @@ trait Seq[+A] with SeqOps[A, Seq, Seq[A]] with IterableFactoryDefaults[A, Seq] with Equals { - this: Seq[A] => override def iterableFactory: SeqFactory[Seq] = Seq @@ -77,7 +74,8 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any + with IterableOps[A, CC, C] { self => override def view: 
SeqView[A] = new SeqView.Id[A](this) @@ -162,13 +160,13 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a new $coll which contains all elements of `prefix` followed * by all the elements of this $coll. */ - def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = iterableFactory.from(prefix match { + def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = iterableFactory.from(prefix match { case prefix: Iterable[B] => new View.Concat(prefix, this) case _ => prefix.iterator ++ iterator }) /** Alias for `prependedAll` */ - @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]^): CC[B] = prependedAll(prefix) + @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]): CC[B] = prependedAll(prefix) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -179,15 +177,14 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a new collection of type `CC[B]` which contains all elements * of this $coll followed by all elements of `suffix`. 
*/ - def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = - super.concat(suffix).unsafeAssumePure + def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = super.concat(suffix) /** Alias for `appendedAll` */ - @`inline` final def :++ [B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) + @`inline` final def :++ [B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) // Make `concat` an alias for `appendedAll` so that it benefits from performance // overrides of this method - @`inline` final override def concat[B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) + @`inline` final override def concat[B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) /** Produces a new sequence which contains all elements of this $coll and also all elements of * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. @@ -215,7 +212,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @tparam B the type of the elements after being transformed by `f` * @return a new $coll consisting of all the elements of this $coll without duplicates. */ - def distinctBy[B](f: A -> B): C = fromSpecific(new View.DistinctBy(this, f)) + def distinctBy[B](f: A => B): C = fromSpecific(new View.DistinctBy(this, f)) /** Returns new $coll with elements in reversed order. * @@ -246,7 +243,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return `true` if the sequence `that` is contained in this $coll at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: IterableOnce[B]^, offset: Int = 0): Boolean = { + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = { val i = iterator drop offset val j = that.iterator while (j.hasNext && i.hasNext) @@ -261,7 +258,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise. 
*/ - def endsWith[B >: A](that: Iterable[B]^): Boolean = { + def endsWith[B >: A](that: Iterable[B]): Boolean = { if (that.isEmpty) true else { val i = iterator.drop(length - that.size) @@ -631,9 +628,6 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => private[this] def init() = { val m = mutable.HashMap[A, Int]() - //val s1 = self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) - //val s2: Seq[(A, Int)] = s1 sortBy (_._2) - //val (es, is) = s2.unzip(using Predef.$conforms[(A, Int)]) val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip (es.to(mutable.ArrayBuffer), is.toArray) @@ -813,7 +807,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => */ def lengthCompare(len: Int): Int = super.sizeCompare(len) - override final def sizeCompare(that: Iterable[_]^): Int = lengthCompare(that) + override final def sizeCompare(that: Iterable[_]): Int = lengthCompare(that) /** Compares the length of this $coll to the size of another `Iterable`. * @@ -828,7 +822,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * is `O(this.length min that.size)` instead of `O(this.length + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def lengthCompare(that: Iterable[_]^): Int = super.sizeCompare(that) + def lengthCompare(that: Iterable[_]): Int = super.sizeCompare(that) /** Returns a value class containing operations for comparing the length of this $coll to a test value. * @@ -851,7 +845,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => /** Are the elements of this collection the same (and in the same order) * as those of `that`? 
*/ - def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]): Boolean = { val thisKnownSize = knownSize val knownSizeDifference = thisKnownSize != -1 && { val thatKnownSize = that.knownSize @@ -943,7 +937,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * except that `replaced` elements starting from `from` are replaced * by all the elements of `other`. */ - def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = + def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = iterableFactory.from(new View.Patched(this, from, other, replaced)) /** A copy of this $coll with one single replaced element. @@ -1010,11 +1004,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self => * @return a `Found` value containing the index corresponding to the element in the * sequence, or the `InsertionPoint` where the element would be inserted if * the element is not in the sequence. - * + * * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` * is returned */ - def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala new file mode 100644 index 000000000000..05bf126aba02 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.nowarn + +/** + * A generic trait for ordered maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] extends Map[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqMap" + + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap) + diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala new file mode 100644 index 000000000000..ad16f01b9184 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -0,0 +1,209 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.nowarn + + +trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { + override def view: SeqView[A] = this + + override def map[B](f: A => B): SeqView[B] = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B] = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B] = new SeqView.Prepended(elem, this) + override def reverse: SeqView[A] = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A] = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A] = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A] = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A] = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A] = new SeqView.Map(this, { (a: A) => f(a); a }) + + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(prefix, this) + + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A] = new SeqView.Sorted(this, ord) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SeqView" +} + +object SeqView { + + /** A `SeqOps` whose collection type and collection type constructor are unknown */ + private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] + + /** A view that doesn’t apply any transformation to an underlying sequence */ + @SerialVersionUID(3L) + class Id[+A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def apply(idx: Int): A = underlying.apply(idx) + def length: Int = underlying.length + def iterator: Iterator[A] = underlying.iterator + override def knownSize: Int = underlying.knownSize + override def 
isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Map[+A, +B](underlying: SomeSeqOps[A], f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + def apply(idx: Int): B = f(underlying(idx)) + def length: Int = underlying.length + } + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeSeqOps[A], elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]) extends View.Prepended(elem, underlying) with SeqView[A] { + def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) + def length: Int = underlying.length + 1 + } + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeSeqOps[A], suffix: SomeSeqOps[A]) extends View.Concat[A](prefix, suffix) with SeqView[A] { + def apply(idx: Int): A = { + val l = prefix.length + if (idx < l) prefix(idx) else suffix(idx - l) + } + def length: Int = prefix.length + suffix.length + } + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + def apply(i: Int) = underlying.apply(size - 1 - i) + def length = underlying.size + def iterator: Iterator[A] = underlying.reverseIterator + override def knownSize: Int = underlying.knownSize + override def isEmpty: Boolean = underlying.isEmpty + } + + @SerialVersionUID(3L) + class Take[+A](underlying: SomeSeqOps[A], n: Int) extends View.Take(underlying, n) with SeqView[A] { + def apply(idx: Int): A = if (idx < n) { + underlying(idx) + } else { + throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})") + } + def length: Int = underlying.length min normN + } + + @SerialVersionUID(3L) + class TakeRight[+A](underlying: SomeSeqOps[A], n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + 
private[this] val delta = (underlying.size - (n max 0)) max 0 + def length = underlying.size - delta + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + delta) + } + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeSeqOps[A], n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + def length = (underlying.size - normN) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i + normN) + override def drop(n: Int): SeqView[A] = new Drop(underlying, this.n + n) + } + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeSeqOps[A], n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + private[this] val len = (underlying.size - (n max 0)) max 0 + def length = len + @throws[IndexOutOfBoundsException] + def apply(i: Int) = underlying.apply(i) + } + + @SerialVersionUID(3L) + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + private[this] val len: Int, + ord: Ordering[B]) + extends SeqView[A] { + outer => + + // force evaluation immediately by calling `length` so infinite collections + // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls + def this(underlying: SomeSeqOps[A], ord: Ordering[B]) = this(underlying, underlying.length, ord) + + @SerialVersionUID(3L) + private[this] class ReverseSorted extends SeqView[A] { + private[this] lazy val _reversed = new SeqView.Reverse(_sorted) + + def apply(i: Int): A = _reversed.apply(i) + def length: Int = len + def iterator: Iterator[A] = Iterator.empty ++ _reversed.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) + override def reverse: SeqView[A] = outer + override protected def reversed: Iterable[A] = outer + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + if (ord1 == Sorted.this.ord) outer + else if (ord1.isReverseOf(Sorted.this.ord)) 
this + else new Sorted(elems, len, ord1) + } + + @volatile private[this] var evaluated = false + + private[this] lazy val _sorted: Seq[A] = { + val res = { + val len = this.len + if (len == 0) Nil + else if (len == 1) List(underlying.head) + else { + val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef] + underlying.copyToArray(arr) + java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]]) + // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it + // is safe because: + // - the ArraySeq is immutable, and items that are not of type A + // cannot be added to it + // - we know it only contains items of type A (and if this collection + // contains items of another type, we'd get a CCE anyway) + // - the cast doesn't actually do anything in the runtime because the + // type of A is not known and Array[_] is Array[AnyRef] + immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + } + } + evaluated = true + underlying = null + res + } + + private[this] def elems: SomeSeqOps[A] = { + val orig = underlying + if (evaluated) _sorted else orig + } + + def apply(i: Int): A = _sorted.apply(i) + def length: Int = len + def iterator: Iterator[A] = Iterator.empty ++ _sorted.iterator // very lazy + override def knownSize: Int = len + override def isEmpty: Boolean = len == 0 + override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory) + override def reverse: SeqView[A] = new ReverseSorted + // we know `_sorted` is either tiny or has efficient random access, + // so this is acceptable for `reversed` + override protected def reversed: Iterable[A] = new ReverseSorted + + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + if (ord1 == this.ord) this + else if (ord1.isReverseOf(this.ord)) reverse + else new Sorted(elems, len, ord1) + } +} + +/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. 
*/ +@SerialVersionUID(3L) +abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A] diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala new file mode 100644 index 000000000000..0ea1e5689473 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Set.scala @@ -0,0 +1,269 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.util.hashing.MurmurHash3 +import java.lang.String + +import scala.annotation.nowarn + +/** Base trait for set collections. + */ +trait Set[A] + extends Iterable[A] + with SetOps[A, Set, Set[A]] + with Equals + with IterableFactoryDefaults[A, Set] { + + def canEqual(that: Any) = true + + /** + * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if + * - the argument `that` is a `Set`, + * - the two sets have the same [[size]], and + * - for every `element` this set, `other.contains(element) == true`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality + * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same + * element equivalence function in their lookup operation. For example, the element equivalence operation in a + * [[scala.collection.immutable.TreeSet]] is defined by its ordering. 
Comparing a `TreeSet` with a `HashSet` leads + * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2` + * (used for lookup in `HashSet`). + * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeSet("A")(ord) == HashSet("a") + * val res0: Boolean = false + * + * scala> HashSet("a") == TreeSet("A")(ord) + * val res1: Boolean = true + * }}} + * + * + * @param that The set to which this set is compared + * @return `true` if the two sets are equal according to the description + */ + override def equals(that: Any): Boolean = + (this eq that.asInstanceOf[AnyRef]) || (that match { + case set: Set[A @unchecked] if set.canEqual(this) => + (this.size == set.size) && { + try this.subsetOf(set) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } + case _ => + false + }) + + override def hashCode(): Int = MurmurHash3.setHash(this) + + override def iterableFactory: IterableFactory[Set] = Set + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "Set" + + override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too +} + +/** Base trait for set operations + * + * @define coll set + * @define Coll `Set` + */ +trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends IterableOps[A, CC, C] + with (A => Boolean) { + + def contains(elem: A): Boolean + + /** Tests if some element is contained in this set. + * + * This method is equivalent to `contains`. It allows sets to be interpreted as predicates. + * @param elem the element to test for membership. + * @return `true` if `elem` is contained in this set, `false` otherwise. + */ + @`inline` final def apply(elem: A): Boolean = this.contains(elem) + + /** Tests whether this set is a subset of another set. + * + * @param that the set to test. 
+ * @return `true` if this set is a subset of `that`, i.e. if + * every element of this set is also an element of `that`. + */ + def subsetOf(that: Set[A]): Boolean = this.forall(that) + + /** An iterator over all subsets of this set of the given size. + * If the requested size is impossible, an empty iterator is returned. + * + * @param len the size of the subsets. + * @return the iterator. + */ + def subsets(len: Int): Iterator[C] = { + if (len < 0 || len > size) Iterator.empty + else new SubsetsItr(this.to(IndexedSeq), len) + } + + /** An iterator over all subsets of this set. + * + * @return the iterator. + */ + def subsets(): Iterator[C] = new AbstractIterator[C] { + private[this] val elms = SetOps.this.to(IndexedSeq) + private[this] var len = 0 + private[this] var itr: Iterator[C] = Iterator.empty + + def hasNext = len <= elms.size || itr.hasNext + def next() = { + if (!itr.hasNext) { + if (len > elms.size) Iterator.empty.next() + else { + itr = new SubsetsItr(elms, len) + len += 1 + } + } + + itr.next() + } + } + + /** An Iterator including all subsets containing exactly len elements. + * If the elements in 'This' type is ordered, then the subsets will also be in the same order. 
+ * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}} + * + * $willForceEvaluation + * + */ + private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] { + private[this] val idxs = Array.range(0, len+1) + private[this] var _hasNext = true + idxs(len) = elms.size + + def hasNext = _hasNext + @throws[NoSuchElementException] + def next(): C = { + if (!hasNext) Iterator.empty.next() + + val buf = newSpecificBuilder + idxs.slice(0, len) foreach (idx => buf += elms(idx)) + val result = buf.result() + + var i = len - 1 + while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1 + + if (i < 0) _hasNext = false + else { + idxs(i) += 1 + for (j <- (i+1) until len) + idxs(j) = idxs(j-1) + 1 + } + + result + } + } + + /** Computes the intersection between this set and another set. + * + * @param that the set to intersect with. + * @return a new set consisting of all elements that are both in this + * set and in the given set `that`. + */ + def intersect(that: Set[A]): C = this.filter(that) + + /** Alias for `intersect` */ + @`inline` final def & (that: Set[A]): C = intersect(that) + + /** Computes the difference of this set and another set. + * + * @param that the set of elements to exclude. + * @return a set containing those elements of this + * set that are not also contained in the given set `that`. 
+ */ + def diff(that: Set[A]): C + + /** Alias for `diff` */ + @`inline` final def &~ (that: Set[A]): C = this diff that + + @deprecated("Consider requiring an immutable Set", "2.13.0") + def -- (that: IterableOnce[A]): C = { + val toRemove = that.iterator.to(immutable.Set) + fromSpecific(view.filterNot(toRemove)) + } + + @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0") + def - (elem: A): C = diff(Set(elem)) + + @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0") + def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2) + + /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates. + * + * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll. + * + * Example: + * {{{ + * scala> val a = Set(1, 2) concat Set(2, 3) + * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3) + * }}} + * + * @param that the collection containing the elements to add. + * @return a new $coll with the given elements added, omitting duplicates. + */ + def concat(that: collection.IterableOnce[A]): C = this match { + case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) => + // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. 
PR #10036) + var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]] + val it = that.iterator + while (it.hasNext) result = result + it.next() + result.asInstanceOf[C] + case _ => fromSpecific(that match { + case that: collection.Iterable[A] => new View.Concat(this, that) + case _ => iterator.concat(that.iterator) + }) + } + + @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") + def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + /** Alias for `concat` */ + @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that) + + /** Computes the union between of set and another set. + * + * @param that the set to form the union with. + * @return a new set consisting of all elements that are in this + * set or in the given set `that`. + */ + @`inline` final def union(that: Set[A]): C = concat(that) + + /** Alias for `union` */ + @`inline` final def | (that: Set[A]): C = concat(that) +} + +/** + * $factoryInfo + * @define coll set + * @define Coll `Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory.Delegate[Set](immutable.Set) + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */ +abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala new file mode 100644 index 000000000000..03ab0bb0dadc --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedMap.scala @@ -0,0 +1,220 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. 
+ */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. 
+ */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
+ */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
+ + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala new file mode 100644 index 000000000000..64e6376be042 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -0,0 +1,90 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +/** Base trait for sorted collections */ +trait SortedOps[A, +C] { + + def ordering: Ordering[A] + + /** Returns the first key of the collection. */ + def firstKey: A + + /** Returns the last key of the collection. */ + def lastKey: A + + /** Comparison function that orders keys. 
*/ + @deprecated("Use ordering.compare instead", "2.13.0") + @deprecatedOverriding("Use ordering.compare instead", "2.13.0") + @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + + /** Creates a ranged projection of this collection. Any mutations in the + * ranged projection will update this collection and vice versa. + * + * Note: keys are not guaranteed to be consistent between this collection + * and the projection. This is the case for buffers where indexing is + * relative to the projection. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * `None` if there is no lower bound. + * @param until The upper-bound (exclusive) of the ranged projection. + * `None` if there is no upper bound. + */ + def rangeImpl(from: Option[A], until: Option[A]): C + + /** Creates a ranged projection of this collection with both a lower-bound + * and an upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + * @param until The upper-bound (exclusive) of the ranged projection. + */ + def range(from: A, until: A): C = rangeImpl(Some(from), Some(until)) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeFrom", "2.13.0") + final def from(from: A): C = rangeFrom(from) + + /** Creates a ranged projection of this collection with no upper-bound. + * + * @param from The lower-bound (inclusive) of the ranged projection. + */ + def rangeFrom(from: A): C = rangeImpl(Some(from), None) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. + */ + @deprecated("Use rangeUntil", "2.13.0") + final def until(until: A): C = rangeUntil(until) + + /** Creates a ranged projection of this collection with no lower-bound. + * + * @param until The upper-bound (exclusive) of the ranged projection. 
+ */ + def rangeUntil(until: A): C = rangeImpl(None, Some(until)) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + @deprecated("Use rangeTo", "2.13.0") + final def to(to: A): C = rangeTo(to) + + /** Create a range projection of this collection with no lower-bound. + * @param to The upper-bound (inclusive) of the ranged projection. + */ + def rangeTo(to: A): C +} diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala new file mode 100644 index 000000000000..c98ca9ae5523 --- /dev/null +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -0,0 +1,189 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + +import scala.annotation.{implicitNotFound, nowarn} +import scala.annotation.unchecked.uncheckedVariance + +/** Base type of sorted sets */ +trait SortedSet[A] extends Set[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + def unsorted: Set[A] = this + + def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedSet" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => + (ss canEqual this) && + (this.size == ss.size) && { + val i1 = this.iterator + val i2 = ss.iterator + var allEqual = true + while (allEqual && i1.hasNext) + allEqual = ordering.equiv(i1.next(), i2.next()) + allEqual + } + case _ => + super.equals(that) + } + +} + +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with SortedOps[A, C] { + + /** The companion object of this sorted set, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedIterableFactory: SortedIterableFactory[CC] + + def unsorted: Set[A] + + /** + * Creates an iterator that contains all values from this collection + * greater than or equal to `start` according to the ordering of + * this collection. 
x.iteratorFrom(y) is equivalent to but will usually + * be more efficient than x.from(y).iterator + * + * @param start The lower-bound (inclusive) of the iterator + */ + def iteratorFrom(start: A): Iterator[A] + + @deprecated("Use `iteratorFrom` instead.", "2.13.0") + @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) + + def firstKey: A = head + def lastKey: A = last + + /** Find the smallest element larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: A): Option[A] = rangeFrom(key).headOption + + /** Find the largest element less than a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption + + override def min[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.min") + else if (ord == ordering) head + else if (ord isReverseOf ordering) last + else super.min[B] // need the type annotation for it to infer the correct implicit + + override def max[B >: A](implicit ord: Ordering[B]): A = + if (isEmpty) throw new UnsupportedOperationException("empty.max") + else if (ord == ordering) last + else if (ord isReverseOf ordering) head + else super.max[B] // need the type annotation for it to infer the correct implicit + + def rangeTo(to: A): C = { + val i = rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + /** Builds a new sorted collection by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. 
+ */ + def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Map(this, f)) + + /** Builds a new sorted collection by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.FlatMap(this, f)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote + sortedIterableFactory.from(that match { + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + /** Builds a new sorted collection by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned collection. 
+ * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + sortedIterableFactory.from(new View.Collect(this, pf)) +} + +object SortedSetOps { + private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`." + + /** Specialize `WithFilter` for sorted collections + * + * @define coll sorted collection + */ + class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]]( + self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _], + p: A => Boolean + ) extends IterableOps.WithFilter[A, IterableCC](self, p) { + + def map[B : Ordering](f: A => B): CC[B] = + self.sortedIterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = + self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) + } + +} + +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet) + diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala new file mode 100644 index 000000000000..0eeb8a44cb72 --- /dev/null +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -0,0 +1,368 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} +import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** Steppers exist to enable creating Java streams over Scala collections, see + * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections + * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements. + * + * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference + * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are + * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.). + * These enable iterating over collections holding unboxed primitives (e.g., Arrays, + * [[scala.jdk.Accumulator]]s) without boxing the elements. + * + * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized + * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.) + * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.). + * + * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive + * Steppers are converted to the corresponding primitive Java Iterators and Spliterators. + * + * @tparam A the element type of the Stepper + */ +trait Stepper[@specialized(Double, Int, Long) +A] { + /** Check if there's an element available. */ + def hasStep: Boolean + + /** Return the next element and advance the stepper */ + def nextStep(): A + + /** Split this stepper, if applicable. 
The elements of the current Stepper are split up between + * the resulting Stepper and the current stepper. + * + * May return `null`, in which case the current Stepper yields the same elements as before. + * + * See method `trySplit` in [[java.util.Spliterator]]. + */ + def trySplit(): Stepper[A] + + /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See + * method `estimateSize` in [[java.util.Spliterator]]. + */ + def estimateSize: Long + + /** Returns a set of characteristics of this Stepper and its elements. See method + * `characteristics` in [[java.util.Spliterator]]. + */ + def characteristics: Int + + /** Returns a [[java.util.Spliterator]] corresponding to this Stepper. + * + * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning + * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]] + * (which is a `Stepper[Int]`). + */ + def spliterator[B >: A]: Spliterator[_] + + /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper. + * + * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning + * a [[java.util.PrimitiveIterator.OfInt]] (which is a `Iterator[Integer]`) in the subclass + * [[IntStepper]] (which is a `Stepper[Int]`). + */ + def javaIterator[B >: A]: JIterator[_] + + /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to + * primitive Steppers box the elements. + */ + def iterator: Iterator[A] = new AbstractIterator[A] { + def hasNext: Boolean = hasStep + def next(): A = nextStep() + } +} + +object Stepper { + /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time + * and space complexity, and that the division is likely to be reasonably even. 
Steppers marked + * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method + * defined in [[scala.jdk.StreamConverters]]. + */ + trait EfficientSplit + + private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper") + + /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type. + * This provides a basis for more efficient stream processing on unboxed values provided that the original source + * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided + * (see for example IntArrayStepper and WidenedByteArrayStepper). */ + + private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingDoubleStepper(s) + } + } + + private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingIntStepper(s) + } + } + + private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): LongStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingLongStepper(s) + } + } + + private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def 
nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingByteStepper(s) + } + } + + private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingCharStepper(s) + } + } + + private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): IntStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingShortStepper(s) + } + } + + private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): DoubleStepper = { + val s = st.trySplit() + if (s == null) null else new UnboxingFloatStepper(s) + } + } +} + +/** A Stepper for arbitrary element types. See [[Stepper]]. 
*/ +trait AnyStepper[+A] extends Stepper[A] { + def trySplit(): AnyStepper[A] + + def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + + def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { + def hasNext: Boolean = hasStep + def next(): B = nextStep() + } +} + +object AnyStepper { + class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + def tryAdvance(c: Consumer[_ >: A]): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + def trySplit(): Spliterator[A] = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: A]): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + } + + def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st) + def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit + + def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st) + def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit + + def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st) + def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit + + private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] { + def hasStep: Boolean = st.hasStep + def nextStep(): Double = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Double] = { + val s = st.trySplit() + if (s == null) null else new BoxedDoubleStepper(s) + } 
+ } + + private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] { + def hasStep: Boolean = st.hasStep + def nextStep(): Int = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Int] = { + val s = st.trySplit() + if (s == null) null else new BoxedIntStepper(s) + } + } + + private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] { + def hasStep: Boolean = st.hasStep + def nextStep(): Long = st.nextStep() + def estimateSize: Long = st.estimateSize + def characteristics: Int = st.characteristics + def trySplit(): AnyStepper[Long] = { + val s = st.trySplit() + if (s == null) null else new BoxedLongStepper(s) + } + } +} + +/** A Stepper for Ints. See [[Stepper]]. */ +trait IntStepper extends Stepper[Int] { + def trySplit(): IntStepper + + def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + + def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { + def hasNext: Boolean = hasStep + def nextInt(): Int = nextStep() + } +} +object IntStepper { + class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + def tryAdvance(c: IntConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match { + case ic: IntConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfInt = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def 
forEachRemaining(c: IntConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match { + case ic: IntConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Doubles. See [[Stepper]]. */ +trait DoubleStepper extends Stepper[Double] { + def trySplit(): DoubleStepper + + def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def hasNext: Boolean = hasStep + def nextDouble(): Double = nextStep() + } +} + +object DoubleStepper { + class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + def tryAdvance(c: DoubleConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match { + case ic: DoubleConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfDouble = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: DoubleConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match { + case ic: DoubleConsumer => 
forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) } + } + } +} + +/** A Stepper for Longs. See [[Stepper]]. */ +trait LongStepper extends Stepper[Long] { + def trySplit(): LongStepper + + def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + + def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def hasNext: Boolean = hasStep + def nextLong(): Long = nextStep() + } +} + +object LongStepper { + class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + def tryAdvance(c: LongConsumer): Boolean = + if (s.hasStep) { c.accept(s.nextStep()); true } else false + // Override for efficiency: don't wrap the function and call the `tryAdvance` overload + override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match { + case ic: LongConsumer => tryAdvance(ic) + case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false + } + // override required for dotty#6152 + override def trySplit(): Spliterator.OfLong = { + val sp = s.trySplit() + if (sp == null) null else sp.spliterator + } + def estimateSize(): Long = s.estimateSize + def characteristics(): Int = s.characteristics + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: LongConsumer): Unit = + while (s.hasStep) { c.accept(s.nextStep()) } + // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance + override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match { + case ic: LongConsumer => forEachRemaining(ic) + case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) } + } + } +} diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala new file mode 100644 index 000000000000..6712073b09e4 --- /dev/null +++ 
b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import java.{lang => jl} + +import scala.collection.Stepper.EfficientSplit + +/** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly + * specialized Stepper `S` according to the element type `T`. + */ +sealed trait StepperShape[-T, S <: Stepper[_]] { + /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ + def shape: StepperShape.Shape + + /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. */ + def seqUnbox(st: AnyStepper[T]): S + + /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`. + * This is an identity operation for reference shapes. 
*/ + def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit +} + +object StepperShape extends StepperShapeLowPriority1 { + class Shape private[StepperShape] (private val s: Int) extends AnyVal + + // reference + val ReferenceShape = new Shape(0) + + // primitive + val IntShape = new Shape(1) + val LongShape = new Shape(2) + val DoubleShape = new Shape(3) + + // widening + val ByteShape = new Shape(4) + val ShortShape = new Shape(5) + val CharShape = new Shape(6) + val FloatShape = new Shape(7) + + implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] { + def shape = IntShape + def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st) + def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit + } + implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]] + + implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] { + def shape = LongShape + def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st) + def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit + } + implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]] + + implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] { + def shape = DoubleShape + def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st) + def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit + } + implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] 
= doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]] + + implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] { + def shape = ByteShape + def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st) + def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit + } + implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]] + + implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] { + def shape = ShortShape + def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st) + def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit + } + implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]] + + implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] { + def shape = CharShape + def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st) + def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit + } + implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]] + + implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] { + def shape = FloatShape + def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st) + def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit + } + 
implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]] +} + +trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 { + implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]] +} + +trait StepperShapeLowPriority2 { + implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]] + + protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] { + def shape = StepperShape.ReferenceShape + def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st + def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st + } +} \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala index 5b504a2469b5..a09766cfa912 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala @@ -16,7 +16,6 @@ package collection import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics -import language.experimental.captureChecking /** * Trait that overrides iterable operations to take advantage of strict builders. 
@@ -28,7 +27,6 @@ import language.experimental.captureChecking trait StrictOptimizedIterableOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { - this: StrictOptimizedIterableOps[A, CC, C] => // Optimized, push-based version of `partition` override def partition(p: A => Boolean): (C, C) = { @@ -57,7 +55,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1], CC[A2]) = { + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { val first = iterableFactory.newBuilder[A1] val second = iterableFactory.newBuilder[A2] foreach { a => @@ -68,7 +66,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { val b1 = iterableFactory.newBuilder[A1] val b2 = iterableFactory.newBuilder[A2] val b3 = iterableFactory.newBuilder[A3] @@ -104,7 +102,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatMap[B](f: A => IterableOnce[B]^): CC[B] = + override def flatMap[B](f: A => IterableOnce[B]): CC[B] = strictOptimizedFlatMap(iterableFactory.newBuilder, f) /** @@ -114,7 +112,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]^): C2 = { + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= f(it.next()) @@ -129,13 +127,13 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B]^, b: mutable.Builder[B, C2]): C2 = { + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B], b: mutable.Builder[B, C2]): C2 = { b ++= this b ++= that b.result() } - override def collect[B](pf: PartialFunction[A, B]^): CC[B] = + override def collect[B](pf: PartialFunction[A, B]): CC[B] = strictOptimizedCollect(iterableFactory.newBuilder, pf) /** @@ -145,7 +143,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]^): C2 = { + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]): C2 = { val marker = Statics.pfMarker val it = iterator while (it.hasNext) { @@ -156,7 +154,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatten[B](implicit toIterableOnce: A -> IterableOnce[B]): CC[B] = + override def flatten[B](implicit toIterableOnce: A => IterableOnce[B]): CC[B] = strictOptimizedFlatten(iterableFactory.newBuilder) /** @@ -166,7 +164,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A -> IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A => IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= toIterableOnce(it.next()) @@ -174,7 +172,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)] = + override def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) /** @@ -184,7 +182,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[(Int, String)]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B]^, b: mutable.Builder[(A, B), C2]): C2 = { + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B], b: mutable.Builder[(A, B), C2]): C2 = { val it1 = iterator val it2 = that.iterator while (it1.hasNext && it2.hasNext) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala new file mode 100644 index 000000000000..1f5791bbb718 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] + extends MapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + strictOptimizedMap(mapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + strictOptimizedFlatMap(mapFactory.newBuilder, f) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(suffix, mapFactory.newBuilder) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = + strictOptimizedCollect(mapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val b = mapFactory.newBuilder[K, V1] + b ++= this + b += elem1 + b += elem2 + if (elems.nonEmpty) b ++= elems + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 50ddbca30f9e..396e53885081 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -11,18 +11,17 @@ */ package scala.collection -import language.experimental.captureChecking /** * Trait that overrides operations on sequences in order * to take advantage of strict builders. 
*/ trait StrictOptimizedSeqOps [+A, +CC[_], +C] - extends AnyRef + extends Any with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A -> B): C = { + override def distinctBy[B](f: A => B): C = { val builder = newSpecificBuilder val seen = mutable.HashSet.empty[B] val it = this.iterator @@ -53,10 +52,10 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = + override def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = strictOptimizedConcat(suffix, iterableFactory.newBuilder) - override def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = { + override def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = { val b = iterableFactory.newBuilder[B] b ++= prefix b ++= this diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala new file mode 100644 index 000000000000..356bd2883578 --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** + * Trait that overrides set operations to take advantage of strict builders. 
+ * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: IterableOnce[A]): C = + strictOptimizedConcat(that, newSpecificBuilder) + +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..1beaf1662abe --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala new file mode 100644 index 000000000000..ded7deabccca --- /dev/null +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala @@ -0,0 +1,42 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.implicitNotFound +import scala.annotation.unchecked.uncheckedVariance + +/** + * Trait that overrides sorted set operations to take advantage of strict builders. + * + * @tparam A Elements type + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { + + override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedMap(sortedIterableFactory.newBuilder, f) + + override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f) + + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = + strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)]) + + override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = + strictOptimizedCollect(sortedIterableFactory.newBuilder, pf) + +} diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index f570531def98..f0be485af8ae 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -22,7 +22,6 @@ import scala.collection.mutable.StringBuilder import scala.math.{ScalaNumber, max, min} import scala.reflect.ClassTag import scala.util.matching.Regex -import language.experimental.captureChecking object StringOps { // just statics for companion class. 
@@ -124,7 +123,7 @@ object StringOps { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -156,7 +155,7 @@ object StringOps { } /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: Char => Boolean): WithFilter^{p, q} = new WithFilter(a => p(a) && q(a), s) + def withFilter(q: Char => Boolean): WithFilter = new WithFilter(a => p(a) && q(a), s) } /** Avoid an allocation in [[collect]]. */ @@ -239,7 +238,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -314,7 +313,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection which contains all chars * of this string followed by all elements of `suffix`. */ - def concat[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = { + def concat[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = suffix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -330,7 +329,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string which contains all chars * of this string followed by all chars of `suffix`. 
*/ - def concat(suffix: IterableOnce[Char]^): String = { + def concat(suffix: IterableOnce[Char]): String = { val k = suffix.knownSize val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) sb.append(s) @@ -348,10 +347,10 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def concat(suffix: String): String = s + suffix /** Alias for `concat` */ - @`inline` def ++[B >: Char](suffix: Iterable[B]^): immutable.IndexedSeq[B] = concat(suffix) + @`inline` def ++[B >: Char](suffix: Iterable[B]): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `concat` */ - @`inline` def ++(suffix: IterableOnce[Char]^): String = concat(suffix) + @`inline` def ++(suffix: IterableOnce[Char]): String = concat(suffix) /** Alias for `concat` */ def ++(xs: String): String = concat(xs) @@ -423,7 +422,7 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def +: (c: Char): String = prepended(c) /** A copy of the string with all elements from a collection prepended */ - def prependedAll[B >: Char](prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = { + def prependedAll[B >: Char](prefix: IterableOnce[B]): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = prefix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -433,7 +432,7 @@ final class StringOps(private val s: String) extends AnyVal { } /** Alias for `prependedAll` */ - @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = prependedAll(prefix) + @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]): immutable.IndexedSeq[B] = prependedAll(prefix) /** A copy of the string with another string prepended */ def prependedAll(prefix: String): String = prefix + s @@ -461,11 +460,11 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def :+ (c: Char): String = appended(c) /** A copy of the string with all elements from a collection appended */ - @`inline` def appendedAll[B >: Char](suffix: 
IterableOnce[B]^): immutable.IndexedSeq[B] = + @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `appendedAll` */ - @`inline` def :++ [B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = + @`inline` def :++ [B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = concat(suffix) /** A copy of the string with another string appended */ @@ -487,7 +486,7 @@ final class StringOps(private val s: String) extends AnyVal { * except that `replaced` chars starting from `from` are replaced * by `other`. */ - def patch[B >: Char](from: Int, other: IterableOnce[B]^, replaced: Int): immutable.IndexedSeq[B] = { + def patch[B >: Char](from: Int, other: IterableOnce[B], replaced: Int): immutable.IndexedSeq[B] = { val len = s.length @`inline` def slc(off: Int, length: Int): WrappedString = new WrappedString(s.substring(off, off+length)) @@ -516,7 +515,7 @@ final class StringOps(private val s: String) extends AnyVal { * by `other`. * @note $unicodeunaware */ - def patch(from: Int, other: IterableOnce[Char]^, replaced: Int): String = + def patch(from: Int, other: IterableOnce[Char], replaced: Int): String = patch(from, other.iterator.mkString, replaced) /** Produces a new string where a slice of characters in this string is replaced by another string. @@ -1196,7 +1195,7 @@ final class StringOps(private val s: String) extends AnyVal { * All these operations apply to those chars of this string * which satisfy the predicate `p`. */ - def withFilter(p: Char => Boolean): StringOps.WithFilter^{p} = new StringOps.WithFilter(p, s) + def withFilter(p: Char => Boolean): StringOps.WithFilter = new StringOps.WithFilter(p, s) /** The rest of the string without its first char. * @note $unicodeunaware @@ -1247,7 +1246,7 @@ final class StringOps(private val s: String) extends AnyVal { def inits: Iterator[String] = iterateUntilEmpty(_.init) // A helper for tails and inits. 
- private[this] def iterateUntilEmpty(f: String => String): Iterator[String]^{f} = + private[this] def iterateUntilEmpty(f: String => String): Iterator[String] = Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") /** Selects all chars of this string which satisfy a predicate. */ @@ -1465,7 +1464,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]^): LazyZip2[Char, B, String]^{that} = new LazyZip2(s, new WrappedString(s), that) + def lazyZip[B](that: Iterable[B]): LazyZip2[Char, B, String] = new LazyZip2(s, new WrappedString(s), that) /* ************************************************************************************************************ @@ -1513,7 +1512,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string consisting of all the chars of this string without duplicates. * @note $unicodeunaware */ - def distinctBy[B](f: Char -> B): String = new WrappedString(s).distinctBy(f).unwrap + def distinctBy[B](f: Char => B): String = new WrappedString(s).distinctBy(f).unwrap /** Sorts the characters of this string according to an Ordering. * diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala new file mode 100644 index 000000000000..5479a58d485f --- /dev/null +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -0,0 +1,319 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.tailrec + +/** A module containing the implementations of parsers from strings to numeric types, and boolean + */ +private[scala] object StringParsers { + + //compile-time constant helpers + + //Int.MinValue == -2147483648 + private final val intOverflowBoundary = -214748364 + private final val intOverflowDigit = 9 + //Long.MinValue == -9223372036854775808L + private final val longOverflowBoundary = -922337203685477580L + private final val longOverflowDigit = 9 + + @inline + private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10) + + @inline + private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { + @tailrec + def rec(i: Int, agg: Int): Option[Int] = + if (agg < min) None + else if (i == len) { + if (!isPositive) Some(agg) + else if (agg == min) None + else Some(-agg) + } + else { + val digit = decValue(from.charAt(i)) + if (digit == -1) None + else rec(i + 1, agg * 10 - digit) + } + rec(1, agg) + } + + @inline + private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9' + + //bool + @inline + final def parseBool(from: String): Option[Boolean] = + if (from.equalsIgnoreCase("true")) Some(true) + else if (from.equalsIgnoreCase("false")) Some(false) + else None + + //integral types + final def parseByte(from: String): Option[Byte] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toByte) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte) + else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte) + else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte) + else None + } + } + + final def parseShort(from: String): 
Option[Short] = { + val len = from.length() + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v.toShort) + else None + } + else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort) + else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort) + else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort) + else None + } + } + + final def parseInt(from: String): Option[Int] = { + val len = from.length() + + @tailrec + def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = { + if (i == len) { + if (!isPositive) Some(agg) + else if (agg == Int.MinValue) None + else Some(-agg) + } + else if (agg < intOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None + else step(i + 1, (agg * 10) - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else { + val first = from.charAt(0) + val v = decValue(first) + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + final def parseLong(from: String): Option[Long] = { + //like parseInt, but Longer + val len = from.length() + + @tailrec + def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { + if (i == len) { + if (isPositive && agg == Long.MinValue) None + else if (isPositive) Some(-agg) + else Some(agg) + } + else if (agg < longOverflowBoundary) None + else { + val digit = decValue(from.charAt(i)) + if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None + else step(i + 1, agg * 10 - digit, isPositive) + } + } + //empty strings parse to None + if (len == 0) None + else 
{ + val first = from.charAt(0) + val v = decValue(first).toLong + if (len == 1) { + //"+" and "-" parse to None + if (v > -1) Some(v) + else None + } + else if (v > -1) step(1, -v, true) + else if (first == '+') step(1, 0, true) + else if (first == '-') step(1, 0, false) + else None + } + } + + //floating point + final def checkFloatFormat(format: String): Boolean = { + //indices are tracked with a start index which points *at* the first index + //and an end index which points *after* the last index + //so that slice length === end - start + //thus start == end <=> empty slice + //and format.substring(start, end) is equivalent to the slice + + //some utilities for working with index bounds into the original string + @inline + def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = { + @tailrec + def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1) + rec(start) + } + + //one after last index for the predicate to hold, or `from` if none hold + //may point after the end of the string + @inline + def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = { + @tailrec @inline + def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1) + else i + rec(from) + } + + + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F')) + + def prefixOK(startIndex: Int, endIndex: Int): Boolean = { + val len = endIndex - startIndex + (len > 0) && { + //the prefix part is + //hexDigits + //hexDigits. + //hexDigits.hexDigits + //.hexDigits + //but not . 
+ if (format.charAt(startIndex) == '.') { + (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit) + } else { + val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex) + (noLeading >= endIndex) || + ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit)) + } + } + } + + def postfixOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + (forAllBetween(startIndex, endIndex, isDigit)) || { + val startchar = format.charAt(startIndex) + (startchar == '+' || startchar == '-') && + (endIndex - startIndex > 1) && + forAllBetween(startIndex + 1, endIndex, isDigit) + } + } + // prefix [pP] postfix + val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) + (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) + } + + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { + //invariant: endIndex > startIndex + + def isExp(c: Char): Boolean = c == 'e' || c == 'E' + + def expOK(startIndex: Int, endIndex: Int): Boolean = + (startIndex < endIndex) && { + val startChar = format.charAt(startIndex) + if (startChar == '+' || startChar == '-') + (endIndex > (startIndex + 1)) && + skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex + else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex + } + + //significant can be one of + //* digits.digits + //* .digits + //* digits. + //but not just . 
+ val startChar = format.charAt(startIndex) + if (startChar == '.') { + val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex) + // a digit is required followed by optional exp + (noSignificant > startIndex + 1) && (noSignificant >= endIndex || + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + ) + } + else if (isDigit(startChar)) { + // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent + val noInt = skipIndexWhile(isDigit, startIndex, endIndex) + // just the digits + (noInt == endIndex) || { + if (format.charAt(noInt) == '.') { + val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex) + (noSignificant >= endIndex) || //no exponent + isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex) + } else + isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex) + } + } + else false + } + + //count 0x00 to 0x20 as "whitespace", and nothing else + val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) + val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 + + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false + else { + //all formats can have a sign + val unsigned = { + val startchar = format.charAt(unspacedStart) + if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart + } + if (unsigned >= unspacedEnd) false + //that's it for NaN and Infinity + else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN" + else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity" + else { + //all other formats can have a format suffix + val desuffixed = { + val endchar = format.charAt(unspacedEnd - 1) + if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1 + else unspacedEnd + } + val len = desuffixed - unsigned + if (len <= 0) false + else if (len >= 2 && (format.charAt(unsigned + 
1) == 'x' || format.charAt(unsigned + 1) == 'X')) + format.charAt(unsigned) == '0' && isHexFloatLiteral(unsigned + 2, desuffixed) + else isDecFloatLiteral(unsigned, desuffixed) + } + } + } + + @inline + def parseFloat(from: String): Option[Float] = + if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) + else None + + @inline + def parseDouble(from: String): Option[Double] = + if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from)) + else None + +} diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index 85910311a4c3..441790c3c6e5 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -15,8 +15,6 @@ package scala.collection import scala.annotation.{nowarn, tailrec} import scala.collection.mutable.{ArrayBuffer, Builder} import scala.collection.immutable.LazyList -import scala.annotation.unchecked.uncheckedCaptures -import language.experimental.captureChecking /** Views are collections whose transformation operations are non strict: the resulting elements * are evaluated only when the view is effectively traversed (e.g. 
using `foreach` or `foldLeft`), @@ -25,9 +23,8 @@ import language.experimental.captureChecking * @define Coll `View` */ trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { - this: View[A]^ => - override def view: View[A]^{this} = this + override def view: View[A] = this override def iterableFactory: IterableFactory[View] = View @@ -58,8 +55,8 @@ object View extends IterableFactory[View] { * * @tparam A View element type */ - def fromIteratorProvider[A](it: () => Iterator[A]^): View[A]^{it} = new AbstractView[A] { - def iterator: Iterator[A]^{it} = it() + def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { + def iterator = it() } /** @@ -70,7 +67,7 @@ object View extends IterableFactory[View] { * * @tparam E View element type */ - def from[E](it: IterableOnce[E]^): View[E]^{it} = it match { + def from[E](it: IterableOnce[E]): View[E] = it match { case it: View[E] => it case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) case _ => LazyList.from(it).view @@ -100,7 +97,7 @@ object View extends IterableFactory[View] { /** A view with given elements */ @SerialVersionUID(3L) - class Elems[A](xs: A*) extends AbstractView[A], Pure { + class Elems[A](xs: A*) extends AbstractView[A] { def iterator = xs.iterator override def knownSize = xs.knownSize override def isEmpty: Boolean = xs.isEmpty @@ -109,7 +106,7 @@ object View extends IterableFactory[View] { /** A view containing the results of some element computation a number of times. */ @SerialVersionUID(3L) class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { - def iterator: Iterator[A]^{elem} = Iterator.fill(n)(elem) + def iterator = Iterator.fill(n)(elem) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -117,7 +114,7 @@ object View extends IterableFactory[View] { /** A view containing values of a given function over a range of integer values starting from 0. 
*/ @SerialVersionUID(3L) class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.tabulate(n)(f) + def iterator: Iterator[A] = Iterator.tabulate(n)(f) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -125,7 +122,7 @@ object View extends IterableFactory[View] { /** A view containing repeated applications of a function to a start value */ @SerialVersionUID(3L) class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.iterate(start)(f).take(len) + def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) override def knownSize: Int = 0 max len override def isEmpty: Boolean = len <= 0 } @@ -135,7 +132,7 @@ object View extends IterableFactory[View] { */ @SerialVersionUID(3L) class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { - def iterator: Iterator[A]^{f} = Iterator.unfold(initial)(f) + def iterator: Iterator[A] = Iterator.unfold(initial)(f) } /** An `IterableOps` whose collection type and collection type constructor are unknown */ @@ -143,14 +140,14 @@ object View extends IterableFactory[View] { /** A view that filters an underlying collection. 
*/ @SerialVersionUID(3L) - class Filter[A](val underlying: SomeIterableOps[A]^, val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.filterImpl(p, isFlipped) + class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.filterImpl(p, isFlipped) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } object Filter { - def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = + def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = underlying match { case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) case _ => new Filter(underlying, p, isFlipped) @@ -159,15 +156,15 @@ object View extends IterableFactory[View] { /** A view that removes the duplicated elements as determined by the transformation function `f` */ @SerialVersionUID(3L) - class DistinctBy[A, B](underlying: SomeIterableOps[A]^, f: A -> B) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.distinctBy(f) + class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.distinctBy(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A1] { - def iterator: Iterator[A1]^{underlying, f} = new AbstractIterator[A1] { + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator = new 
AbstractIterator[A1] { private[this] val self = underlying.iterator private[this] var hd: A1 = _ private[this] var hdDefined: Boolean = false @@ -191,8 +188,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A2] { - def iterator: Iterator[A2]^{this} = new AbstractIterator[A2] { + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator = new AbstractIterator[A2] { private[this] val self = underlying.iterator private[this] var hd: A2 = _ private[this] var hdDefined: Boolean = false @@ -217,8 +214,8 @@ object View extends IterableFactory[View] { /** A view that drops leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Drop[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.drop(n) + class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.drop(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -229,8 +226,8 @@ object View extends IterableFactory[View] { /** A view that drops trailing elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = dropRightIterator(underlying.iterator, n) + class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = dropRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -242,16 +239,16 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class DropWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.dropWhile(p) + class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator = underlying.iterator.dropWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that takes leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Take[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.take(n) + class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = underlying.iterator.take(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -262,8 +259,8 @@ object View extends IterableFactory[View] { /** A view that takes trailing elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = takeRightIterator(underlying.iterator, n) + class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { + def iterator = takeRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -275,15 +272,15 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class TakeWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying, p} = underlying.iterator.takeWhile(p) + class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.takeWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class ScanLeft[+A, +B](underlying: SomeIterableOps[A]^, z: B, op: (B, A) => B) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, op} = underlying.iterator.scanLeft(z)(op) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -293,32 +290,32 @@ object View extends IterableFactory[View] { /** A view that maps elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeIterableOps[A]^, f: A => B) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, f} = underlying.iterator.map(f) + class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { + def iterator = underlying.iterator.map(f) override def knownSize = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } /** A view that flatmaps elements of the underlying collection. */ @SerialVersionUID(3L) - class FlatMap[A, B](underlying: SomeIterableOps[A]^, f: A => IterableOnce[B]^) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, f} = underlying.iterator.flatMap(f) + class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { + def iterator = underlying.iterator.flatMap(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that collects elements of the underlying collection. */ @SerialVersionUID(3L) - class Collect[+A, B](underlying: SomeIterableOps[A]^, pf: PartialFunction[A, B]^) extends AbstractView[B] { - def iterator: Iterator[B]^{underlying, pf} = underlying.iterator.collect(pf) + class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { + def iterator = underlying.iterator.collect(pf) } /** A view that concatenates elements of the prefix collection or iterator with the elements * of the suffix collection or iterator. 
*/ @SerialVersionUID(3L) - class Concat[A](prefix: SomeIterableOps[A]^, suffix: SomeIterableOps[A]^) extends AbstractView[A] { - def iterator: Iterator[A]^{prefix, suffix} = prefix.iterator ++ suffix.iterator + class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { + def iterator = prefix.iterator ++ suffix.iterator override def knownSize = { val prefixSize = prefix.knownSize if (prefixSize >= 0) { @@ -335,8 +332,8 @@ object View extends IterableFactory[View] { * of another collection. */ @SerialVersionUID(3L) - class Zip[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^) extends AbstractView[(A, B)] { - def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zip(other) + class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zip(other) override def knownSize = { val s1 = underlying.knownSize if (s1 == 0) 0 else { @@ -352,8 +349,8 @@ object View extends IterableFactory[View] { * placeholder elements are used to extend the shorter collection to the length of the longer. 
*/ @SerialVersionUID(3L) - class ZipAll[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^, thisElem: A, thatElem: B) extends AbstractView[(A, B)] { - def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zipAll(other, thisElem, thatElem) + class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) override def knownSize = { val s1 = underlying.knownSize if(s1 == -1) -1 else { @@ -366,10 +363,8 @@ object View extends IterableFactory[View] { /** A view that appends an element to its elements */ @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIterableOps[A]^, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = - val ct = new Concat(underlying, new View.Single(elem)) - ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -379,10 +374,8 @@ object View extends IterableFactory[View] { /** A view that prepends an element to its elements */ @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIterableOps[A]^) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = - val ct = new Concat(new View.Single(elem), underlying) - ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { + def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -391,8 +384,8 @@ object View extends 
IterableFactory[View] { } @SerialVersionUID(3L) - class Updated[A](underlying: SomeIterableOps[A]^, index: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = new AbstractIterator[A] { + class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = new AbstractIterator[A] { private[this] val it = underlying.iterator private[this] var i = 0 def next(): A = { @@ -410,28 +403,28 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - private[collection] class Patched[A](underlying: SomeIterableOps[A]^, from: Int, other: IterableOnce[A]^, replaced: Int) extends AbstractView[A] { + private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { // we may be unable to traverse `other` more than once, so we need to cache it if that's the case - private val _other: Iterable[A]^{other} = other match { + private val _other: Iterable[A] = other match { case other: Iterable[A] => other case other => LazyList.from(other) } - def iterator: Iterator[A]^{underlying, other} = underlying.iterator.patch(from, _other.iterator, replaced) + def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } @SerialVersionUID(3L) - class ZipWithIndex[A](underlying: SomeIterableOps[A]^) extends AbstractView[(A, Int)] { - def iterator: Iterator[(A, Int)]^{underlying} = underlying.iterator.zipWithIndex + class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class 
PadTo[A](underlying: SomeIterableOps[A]^, len: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A]^{underlying} = underlying.iterator.padTo(len, elem) + class PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) override def knownSize: Int = { val size = underlying.knownSize @@ -440,7 +433,7 @@ object View extends IterableFactory[View] { override def isEmpty: Boolean = underlying.isEmpty && len <= 0 } - private[collection] def takeRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { val k = it.knownSize if(k == 0 || n <= 0) Iterator.empty else if(n == Int.MaxValue) it @@ -448,23 +441,22 @@ object View extends IterableFactory[View] { else new TakeRightIterator[A](it, n) } - private final class TakeRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { - private[this] var current: Iterator[A @uncheckedCaptures]^{underlying} = underlying + private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) len = 0 - while(current.hasNext) { - val n = current.next().asInstanceOf[AnyRef] + while(underlying.hasNext) { + val n = underlying.next().asInstanceOf[AnyRef] if(pos >= buf.length) buf.addOne(n) else buf(pos) = n pos += 1 if(pos == maxlen) pos = 0 len += 1 } - current = null + underlying = null if(len > maxlen) len = maxlen pos = pos - len if(pos < 0) pos += maxlen @@ -485,7 +477,7 @@ object View extends IterableFactory[View] { x } } - override def drop(n: Int): Iterator[A]^{this} = { + override def drop(n: Int): Iterator[A] = { init() if (n > 0) { len = (len - n) max 0 @@ 
-495,7 +487,7 @@ object View extends IterableFactory[View] { } } - private[collection] def dropRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { + private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { if(n <= 0) it else { val k = it.knownSize @@ -504,7 +496,7 @@ object View extends IterableFactory[View] { } } - private final class DropRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala new file mode 100644 index 000000000000..4699abbef5a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -0,0 +1,70 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. `List`) + * + * @define coll collection + */ +@SerialVersionUID(3L) +abstract class WithFilter[+A, +CC[_]] extends Serializable { + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. 
+ * @return a new $coll resulting from applying + * the given function `f` to each element of the filtered outer $coll + * and collecting the results. + */ + def map[B](f: A => B): CC[B] + + /** Builds a new collection by applying a function to all elements of the + * `filtered` outer $coll containing this `WithFilter` instance that satisfy + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying + * the given collection-valued function `f` to each element + * of the filtered outer $coll and + * concatenating the results. + */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Applies a function `f` to all elements of the `filtered` outer $coll. + * + * @param f the function that is applied for its side-effect to every element. + * The result of function `f` is discarded. + * + * @tparam U the type parameter describing the result of function `f`. + * This result will always be ignored. Typically `U` is `Unit`, + * but this is not necessary. + */ + def foreach[U](f: A => U): Unit + + /** Further refines the filter for this `filtered` $coll. + * + * @param q the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this $coll which + * also satisfy both `p` and `q` predicates. + */ + def withFilter(q: A => Boolean): WithFilter[A, CC] + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/BasicNode.java b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java new file mode 100644 index 000000000000..c6ec91e4fde8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/BasicNode.java @@ -0,0 +1,19 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +public abstract class BasicNode { + + public abstract String string(int lev); + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java new file mode 100644 index 000000000000..ddffa365234e --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/CNodeBase.java @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; + +abstract class CNodeBase extends MainNode { + + @SuppressWarnings("unchecked") + public static final AtomicIntegerFieldUpdater> updater = + AtomicIntegerFieldUpdater.newUpdater((Class>) (Class) CNodeBase.class, "csize"); + + public volatile int csize = -1; + + public boolean CAS_SIZE(int oldval, int nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_SIZE(int nval) { + updater.set(this, nval); + } + + public int READ_SIZE() { + return updater.get(this); + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/Gen.java b/tests/pos-special/stdlib/collection/concurrent/Gen.java new file mode 100644 index 000000000000..07af2983f32d --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/Gen.java @@ -0,0 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +final class Gen {} diff --git a/tests/pos-special/stdlib/collection/concurrent/INodeBase.java b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java new file mode 100644 index 000000000000..dfb99806594f --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/INodeBase.java @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + +abstract class INodeBase extends BasicNode { + + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater, MainNode> updater = + AtomicReferenceFieldUpdater.newUpdater((Class>) (Class) INodeBase.class, (Class>) (Class) MainNode.class, "mainnode"); + + static final Object RESTART = new Object(); + + static final Object NO_SUCH_ELEMENT_SENTINEL = new Object(); + + public volatile MainNode mainnode = null; + + public final Gen gen; + + public INodeBase(Gen generation) { + gen = generation; + } + + public BasicNode prev() { + return null; + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/MainNode.java b/tests/pos-special/stdlib/collection/concurrent/MainNode.java new file mode 100644 index 000000000000..f7f022974e9e --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/MainNode.java @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.concurrent; + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + +abstract class MainNode extends BasicNode { + + @SuppressWarnings("unchecked") + public static final AtomicReferenceFieldUpdater, MainNode> updater = + AtomicReferenceFieldUpdater.newUpdater((Class>) (Class) MainNode.class, (Class>) (Class) MainNode.class, "prev"); + + public volatile MainNode prev = null; + + public abstract int cachedSize(Object ct); + + // standard contract + public abstract int knownSize(); + + public boolean CAS_PREV(MainNode oldval, MainNode nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_PREV(MainNode nval) { + updater.set(this, nval); + } + + // do we need this? unclear in the javadocs... + // apparently not - volatile reads are supposed to be safe + // regardless of whether there are concurrent ARFU updates + @Deprecated @SuppressWarnings("unchecked") + public MainNode READ_PREV() { + return (MainNode) updater.get(this); + } + +} diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala new file mode 100644 index 000000000000..c2b996b93102 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala @@ -0,0 +1,192 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.concurrent + +import scala.annotation.tailrec + +/** A template trait for mutable maps that allow concurrent access. 
+ * + * $concurrentmapinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]] + * section on `Concurrent Maps` for more information. + * + * @tparam K the key type of the map + * @tparam V the value type of the map + * + * @define Coll `concurrent.Map` + * @define coll concurrent map + * @define concurrentmapinfo + * This is a base trait for all Scala concurrent map implementations. It + * provides all of the methods a `Map` does, with the difference that all the + * changes are atomic. It also describes methods specific to concurrent maps. + * + * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values. + * + * @define atomicop + * This is an atomic operation. + */ +trait Map[K, V] extends scala.collection.mutable.Map[K, V] { + + /** + * Associates the given key with a given value, unless the key was already + * associated with some other value. + * + * $atomicop + * + * @param k key with which the specified value is to be associated with + * @param v value to be associated with the specified key + * @return `Some(oldvalue)` if there was a value `oldvalue` previously + * associated with the specified key, or `None` if there was no + * mapping for the specified key + */ + def putIfAbsent(k: K, v: V): Option[V] + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + def remove(k: K, v: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. 
+ * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldvalue value expected to be associated with the specified key + * if replacing is to happen + * @param newvalue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + def replace(k: K, oldvalue: V, newvalue: V): Boolean + + /** + * Replaces the entry for the given key only if it was previously mapped + * to some value. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param v value to be associated with the specified key + * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise + */ + def replace(k: K, v: V): Option[V] + + override def getOrElseUpdate(key: K, op: => V): V = get(key) match { + case Some(v) => v + case None => + val v = op + putIfAbsent(key, v) match { + case Some(ov) => ov + case None => v + } + } + + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. 
+ * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated. 
+ * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction) + + @tailrec + private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = get(key) + val nextValue = remappingFunction(previousValue) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } + } + updateWithAux(key)(remappingFunction) + } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (!p(k, v)) removeRefEq(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replaceRefEq(k, v, f(k, v)) + } + this + } +} diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala new file mode 100644 index 000000000000..e4aa8c8c52a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -0,0 +1,1202 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package concurrent + +import java.util.concurrent.atomic._ +import scala.{unchecked => uc} +import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{List, Nil} +import scala.collection.mutable.GrowableBuilder +import scala.util.Try +import scala.util.hashing.Hashing + +private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen, equiv: Equiv[K]) = this(null, g, equiv) + + def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + def gcasRead(ct: TrieMap[K, V]): MainNode[K, V] = GCAS_READ(ct) + + def GCAS_READ(ct: TrieMap[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.readRoot(abort = true) + + prev match { + case null => + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. + // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. 
+ // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: TrieMap[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + private def equal(k1: K, k2: K, ct: TrieMap[K, V]) = ct.equality.equiv(k1, k2) + + private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen, equiv) + nin.WRITE(cn) + nin + } + + def copyToGen(ngen: Gen, ct: TrieMap[K, V]) = { + val nin = new INode[K, V](ngen, equiv) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + GCAS(cn, nn, ct) + } + case basicNode => throw new MatchError(basicNode) + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + case mainNode => throw new MatchError(mainNode) + } + } + + + + /** Inserts a new key value pair, given that a specific condition is met. 
+ * + * @param cond KEY_PRESENT_OR_ABSENT - don't care if the key was there, insert or overwrite + * KEY_ABSENT - key wasn't there, insert only, do not overwrite + * KEY_PRESENT - key was there, overwrite only, do not insert + * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value + * @param hc the hashcode of `k` + * + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => cond match { + case INode.KEY_PRESENT_OR_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && equal(sn.k, k, ct)) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen, equiv)), gen) + if (GCAS(cn, nn, ct)) 
None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && equal(sn.k, k, ct)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv => + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + case basicNode => throw new MatchError(basicNode) + } + } else cond match { + case INode.KEY_PRESENT_OR_ABSENT | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, k, v, hc, gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => // 3) an l-node + def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case INode.KEY_PRESENT_OR_ABSENT => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv => + ln.get(k) match { + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null + case _ => None + } + } + case mainNode => throw new MatchError(mainNode) + } + } + + /** Looks up the value associated with the key. + * + * @param hc the hashcode of `k` + * + * @return NO_SUCH_ELEMENT_SENTINEL if no value has been found, RESTART if the operation wasn't successful, + * or any other value otherwise + */ + @tailrec def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) NO_SUCH_ELEMENT_SENTINEL // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] @uc => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else RESTART + } + case sn: SNode[K, V] @uc => // 2) singleton node + if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + case basicNode => throw new MatchError(basicNode) + } + } + case tn: TNode[_, _] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else NO_SUCH_ELEMENT_SENTINEL + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].getOrElse(NO_SUCH_ELEMENT_SENTINEL) + case mainNode => throw new MatchError(mainNode) + } + } + + /** Removes the key associated with the given value. + * + * @param hc the hashcode of `k` + * + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + def rec_remove( + k: K, + v: V, + removalPolicy: Int, + hc: Int, + lev: Int, + parent: INode[K, V], + startgen: Gen, + ct: TrieMap[K, V]): Option[V] = { + + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] @uc => + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] @uc => + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + case basicNode => throw new MatchError(basicNode) + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef): Unit = { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.readRoot().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (removalPolicy == RemovalPolicy.Always) { + val optv = ln.get(k) + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if 
RemovalPolicy.shouldRemove(removalPolicy)(v, v0) => + val nn = ln.removed(k, ct) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + case mainNode => throw new MatchError(mainNode) + } + } + + private def clean(nd: INode[K, V], ct: TrieMap[K, V], lev: Int): Unit = { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null + + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() + + /* this is a quiescent method! */ + def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { + case null => "" + case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) + case cn: CNode[_, _] => cn.string(lev) + case ln: LNode[_, _] => ln.string(lev) + case x => "".format(x) + }) + +} + + +private[concurrent] object INode { + //////////////////////////////////////////////////////////////////////////////////////////////////// + // Arguments for `cond` argument in TrieMap#rec_insertif + //////////////////////////////////////////////////////////////////////////////////////////////////// + final val KEY_PRESENT = new AnyRef + final val KEY_ABSENT = new AnyRef + final val KEY_PRESENT_OR_ABSENT = new AnyRef + + def newRootNode[K, V](equiv: Equiv[K]) = { + val gen = new Gen + val cn = new CNode[K, V](0, new Array(0), gen) + new INode[K, V](cn, gen, equiv) + } +} + + +private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { + WRITE_PREV(p) + + def string(lev: Int) = throw new UnsupportedOperationException + + def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + + def knownSize: Int = throw new UnsupportedOperationException + + override def toString = "FailedNode(%s)".format(p) +} + + +private[concurrent] trait KVNode[K, V] { + def kvPair: (K, V) +} + + 
+private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) + extends BasicNode with KVNode[K, V] { + def copy = new SNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) +} + +// Tomb Node, used to ensure proper ordering during removals +private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) + extends MainNode[K, V] with KVNode[K, V] { + def copy = new TNode(k, v, hc) + def copyTombed = new TNode(k, v, hc) + def copyUntombed = new SNode(k, v, hc) + def kvPair = (k, v) + def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 + def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) +} + +// List Node, leaf node that handles hash collisions +private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) + extends MainNode[K, V] { + + def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) + + def this(k1: K, v1: V, k2: K, v2: V, equiv: Equiv[K]) = + this(if (equiv.equiv(k1, k2)) (k2 -> v2) :: Nil else (k1 -> v1) :: (k2 -> v2) :: Nil, equiv) + + def inserted(k: K, v: V) = { + var k0: K = k + @tailrec + def remove(elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] = { + if (elems.isEmpty) acc + else if (equiv.equiv(elems.head._1, k)) { + k0 = elems.head._1 + acc ::: elems.tail + } else remove(elems.tail, elems.head :: acc) + } + val e = remove(entries, Nil) + new LNode((k0 -> v) :: e, equiv) + } + + def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { + val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) + else { + val (k, v) = updmap.iterator.next() + new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses + } + } + + def get(k: K): Option[V] = entries.find(entry => 
equiv.equiv(entry._1, k)).map(_._2) + + def cachedSize(ct: AnyRef): Int = entries.size + + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(entries.mkString(", ")) + +} + +// Ctrie Node, contains bitmap and array of references to branch nodes +private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { + // this should only be called from within read-only snapshots + def cachedSize(ct: AnyRef): Int = { + val currsz = READ_SIZE() + if (currsz != -1) currsz + else { + val sz = computeSize(ct.asInstanceOf[TrieMap[K, V]]) + while (READ_SIZE() == -1) CAS_SIZE(-1, sz) + READ_SIZE() + } + } + + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + + // lends itself towards being parallelizable by choosing + // a random starting offset in the array + // => if there are concurrent size computations, they start + // at different positions, so they are more likely to + // to be independent + private def computeSize(ct: TrieMap[K, V]): Int = { + var i = 0 + var sz = 0 + val offset = + if (array.length > 0) + //util.Random.nextInt(array.length) /* <-- benchmarks show that this causes observable contention */ + java.util.concurrent.ThreadLocalRandom.current.nextInt(0, array.length) + else 0 + while (i < array.length) { + val pos = (i + offset) % array.length + array(pos) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + sz + } + + def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + def removedAt(pos: Int, flag: Int, gen: Gen) = { + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 
1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + def insertedAt(pos: Int, flag: Int, k: K, v: V, hc: Int, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = new SNode(k, v, hc) + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that all the i-nodes below it are copied + * to the specified generation `ngen`. + */ + def renewed(ngen: Gen, ct: TrieMap[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] @uc => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = { + val bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] @uc => + val inodemain = in.gcasRead(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, 
inodemain) + case sn: SNode[K, V] @uc => + tmparray(i) = sn + case basicNode => throw new MatchError(basicNode) + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + override def toString = { + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" + } +} + +private[concurrent] object CNode { + + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen, equiv)//(TrieMap.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen, equiv) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v, equiv) + } + +} + + +private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +/** A concurrent hash-trie or TrieMap is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. 
The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. + * + * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] + */ +@SerialVersionUID(-5212455458703321708L) +final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) + extends scala.collection.mutable.AbstractMap[K, V] + with scala.collection.concurrent.Map[K, V] + with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] + with scala.collection.MapFactoryDefaults[K, V, TrieMap, mutable.Iterable] + with DefaultSerializable { + + private[this] var hashingobj = if (hashf.isInstanceOf[Hashing.Default[_]]) new TrieMap.MangledHashing[K] else hashf + private[this] var equalityobj = ef + @transient + private[this] var rootupdater = rtupd + def hashing = hashingobj + def equality = equalityobj + @volatile private var root = r + + def this(hashf: Hashing[K], ef: Equiv[K]) = this( + INode.newRootNode(ef), + AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"), + hashf, + ef + ) + + def this() = this(Hashing.default, Equiv.universal) + + override def mapFactory: MapFactory[TrieMap] = TrieMap + + /* internal methods */ + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.writeObject(hashingobj) + out.writeObject(equalityobj) + + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + out.writeObject(k) + out.writeObject(v) + } + out.writeObject(TrieMapSerializationEnd) + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + root = INode.newRootNode(equality) + rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root") + + hashingobj = in.readObject().asInstanceOf[Hashing[K]] + equalityobj = in.readObject().asInstanceOf[Equiv[K]] + + var obj: AnyRef = in.readObject() + + while (obj != 
TrieMapSerializationEnd) { + obj = in.readObject() + if (obj != TrieMapSerializationEnd) { + val k = obj.asInstanceOf[K] + val v = in.readObject().asInstanceOf[V] + update(k, v) + } + } + } + + private def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + + private[collection] def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) + + private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { + val r = /*READ*/root + r match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => RDCSS_Complete(abort) + case x => throw new MatchError(x) + } + } + + @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { + val v = /*READ*/root + v match { + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => + val RDCSS_Descriptor(ov, exp, nv) = desc + if (abort) { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } else { + val oldmain = ov.gcasRead(this) + if (oldmain eq exp) { + if (CAS_ROOT(desc, nv)) { + desc.committed = true + nv + } else RDCSS_Complete(abort) + } else { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } + } + case x => throw new MatchError(x) + } + } + + private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { + val desc = RDCSS_Descriptor(ov, expectedmain, nv) + if (CAS_ROOT(ov, desc)) { + RDCSS_Complete(abort = false) + /*READ*/desc.committed + } else false + } + + @tailrec private def inserthc(k: K, hc: Int, v: V): Unit = { + val r = RDCSS_READ_ROOT() + if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) + } + + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = { + val r = RDCSS_READ_ROOT() + + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) + else ret + } + + /** Finds the value associated with this key + * + * @param 
k the key to look up + * @param hc the hashcode of `k` + * + * @return the value: V associated with `k`, if it exists. Otherwise, INodeBase.NO_SUCH_ELEMENT_SENTINEL + */ + @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + val res = r.rec_lookup(k, hc, 0, null, r.gen, this) + if (res eq INodeBase.RESTART) lookuphc(k, hc) + else res + } + + /** Removes a key-value pair from the map + * + * @param k the key to remove + * @param v the value compare with the value found associated with the key + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) + * @return an Option[V] indicating the previous value + */ + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { + val r = RDCSS_READ_ROOT() + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) + if (res ne null) res + else removehc(k, v, removalPolicy, hc) + } + + + def string = RDCSS_READ_ROOT().string(0) + + /* public methods */ + + def isReadOnly = rootupdater eq null + + def nonReadOnly = rootupdater ne null + + /** Returns a snapshot of this TrieMap. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this TrieMap are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * TrieMap is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ + @tailrec def snapshot(): TrieMap[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r.copyToGen(new Gen, this), rootupdater, hashing, equality) + else snapshot() + } + + /** Returns a read-only snapshot of this TrieMap. + * This operation is lock-free and linearizable. 
+ * + * The snapshot is lazily updated - the first time some branch + * of this TrieMap are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this TrieMap by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. + */ + @tailrec def readOnlySnapshot(): scala.collection.Map[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.gcasRead(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new TrieMap(r, null, hashing, equality) + else readOnlySnapshot() + } + + @tailrec override def clear(): Unit = { + val r = RDCSS_READ_ROOT() + if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V](equality))) clear() + } + + def computeHash(k: K) = hashingobj.hash(k) + + @deprecated("Use getOrElse(k, null) instead.", "2.13.0") + def lookup(k: K): V = { + val hc = computeHash(k) + val lookupRes = lookuphc(k, hc) + val res = if (lookupRes == INodeBase.NO_SUCH_ELEMENT_SENTINEL) null else lookupRes + res.asInstanceOf[V] + } + + override def apply(k: K): V = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) throw new NoSuchElementException + else res.asInstanceOf[V] + } + + def get(k: K): Option[V] = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq INodeBase.NO_SUCH_ELEMENT_SENTINEL) None else Some(res).asInstanceOf[Option[V]] + } + + override def put(key: K, value: V): Option[V] = { + val hc = computeHash(key) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) + } + + override def update(k: K, v: V): Unit = { + val hc = computeHash(k) + inserthc(k, hc, v) + } + + def addOne(kv: (K, V)) = { + update(kv._1, kv._2) + this + } + + override def remove(k: K): Option[V] = { + val hc = computeHash(k) + 
removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) + } + + def subtractOne(k: K) = { + remove(k) + this + } + + def putIfAbsent(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) + } + + // TODO once computeIfAbsent is added to concurrent.Map, + // move the comment there and tweak the 'at most once' part + /** If the specified key is not already in the map, computes its value using + * the given thunk `op` and enters it into the map. + * + * If the specified mapping function throws an exception, + * that exception is rethrown. + * + * Note: This method will invoke op at most once. + * However, `op` may be invoked without the result being added to the map if + * a concurrent process is also trying to add a value corresponding to the + * same key `k`. + * + * @param k the key to modify + * @param op the expression that computes the value + * @return the newly added value + */ + override def getOrElseUpdate(k: K, op: => V): V = { + val hc = computeHash(k) + lookuphc(k, hc) match { + case INodeBase.NO_SUCH_ELEMENT_SENTINEL => + val v = op + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { + case Some(oldValue) => oldValue + case None => v + } + case oldValue => oldValue.asInstanceOf[V] + } + } + + def remove(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[collection] def removeRefEq(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty + } + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = 
false).nonEmpty + } + + def replace(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) + } + + def iterator: Iterator[(K, V)] = { + if (nonReadOnly) readOnlySnapshot().iterator + else new TrieMapIterator(0, this) + } + + //////////////////////////////////////////////////////////////////////////// + // + // scala/bug#10177 These methods need overrides as the inherited implementations + // call `.iterator` more than once, which doesn't guarantee a coherent + // view of the data if there is a concurrent writer + // Note that the we don't need overrides for keysIterator or valuesIterator + // TrieMapTest validates the behaviour. + override def values: Iterable[V] = { + if (nonReadOnly) readOnlySnapshot().values + else super.values + } + override def keySet: Set[K] = { + if (nonReadOnly) readOnlySnapshot().keySet + else super.keySet + } + + override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view + + @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") + override def filterKeys(p: K => Boolean): collection.MapView[K, V] = view.filterKeys(p) + + @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") + override def mapValues[W](f: V => W): collection.MapView[K, W] = view.mapValues(f) + // END extra overrides + /////////////////////////////////////////////////////////////////// + + override def size: Int = + if (nonReadOnly) readOnlySnapshot().size + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize + override protected[this] def className = "TrieMap" + + override def lastOption: Option[(K, V)] = if (isEmpty) None else Try(last).toOption +} + + +@SerialVersionUID(3L) +object TrieMap extends MapFactory[TrieMap] { + + def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + + def from[K, V](it: IterableOnce[(K, V)]): TrieMap[K, V] = new TrieMap[K, V]() ++= it + + def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + + @transient + val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + class MangledHashing[K] extends Hashing[K] { + def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) + } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } +} + +// non-final as an extension point for parallel collections +private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { + private val stack = new 
Array[Array[BasicNode]](7) + private val stackpos = new Array[Int](7) + private var depth = -1 + private var subiter: Iterator[(K, V)] = null + private var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.gcasRead(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.entries.iterator + checkSubiter() + case null => + current = null + case mainNode => throw new MatchError(mainNode) + } + + private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + private def initialize(): Unit = { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + @tailrec + final def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) + } + } else { + depth -= 1 + advance() + } + } else current = null + + protected def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): TrieMapIterator[K, V] = new TrieMapIterator[K, V](_lev, _ct, _mustInit) + + protected def dupTo(it: TrieMapIterator[K, V]): Unit = { + it.level = this.level + it.ct = this.ct + it.depth = this.depth + it.current = this.current + + // these need a deep copy + Array.copy(this.stack, 0, it.stack, 0, 7) + Array.copy(this.stackpos, 0, it.stackpos, 0, 7) + + // this one needs to be evaluated + if (this.subiter == null) it.subiter = null + else { + val lst = 
this.subiter.to(immutable.List) + this.subiter = lst.iterator + it.subiter = lst.iterator + } + } + + /** Returns a sequence of iterators over subsets of this iterator. + * It's used to ease the implementation of splitters for a parallel version of the TrieMap. + */ + protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { + // the case where an LNode is being iterated + val it = newIterator(level + 1, ct, _mustInit = false) + it.depth = -1 + it.subiter = this.subiter + it.current = null + this.subiter = null + advance() + this.level += 1 + Seq(it, this) + } else if (depth == -1) { + this.level += 1 + Seq(this) + } else { + var d = 0 + while (d <= depth) { + val rem = stack(d).length - 1 - stackpos(d) + if (rem > 0) { + val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) + stack(d) = arr1 + stackpos(d) = -1 + val it = newIterator(level + 1, ct, _mustInit = false) + it.stack(0) = arr2 + it.stackpos(0) = -1 + it.depth = 0 + it.advance() // <-- fix it + this.level += 1 + return Seq(this, it) + } + d += 1 + } + this.level += 1 + Seq(this) + } + +} + +/** Only used for ctrie serialization. */ +@SerialVersionUID(3L) +private[concurrent] case object TrieMapSerializationEnd diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala new file mode 100644 index 000000000000..3d155337aa93 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala @@ -0,0 +1,260 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} + +/** Defines converter methods from Scala to Java collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ +trait AsJavaConverters { + import JavaCollectionWrappers._ + + /** + * Converts a Scala `Iterator` to a Java `Iterator`. + * + * The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterator` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Iterator` view of the argument. + */ + def asJava[A](i: Iterator[A]): ju.Iterator[A] = i match { + case null => null + case wrapper: JIteratorWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterator` to a Java `Enumeration`. + * + * The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects + * of using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Enumeration` will be returned. + * + * @param i The Scala `Iterator` to be converted. + * @return A Java `Enumeration` view of the argument. + */ + def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match { + case null => null + case wrapper: JEnumerationWrapper[A @uc] => wrapper.underlying + case _ => new IteratorWrapper(i) + } + + /** + * Converts a Scala `Iterable` to a Java `Iterable`. 
+ * + * The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of + * using it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Iterable` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Iterable` view of the argument. + */ + def asJava[A](i: Iterable[A]): jl.Iterable[A] = i match { + case null => null + case wrapper: JIterableWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala `Iterable` to an immutable Java `Collection`. + * + * If the Scala `Iterable` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Collection` will be returned. + * + * @param i The Scala `Iterable` to be converted. + * @return A Java `Collection` view of the argument. + */ + def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match { + case null => null + case wrapper: JCollectionWrapper[A @uc] => wrapper.underlying + case _ => new IterableWrapper(i) + } + + /** + * Converts a Scala mutable `Buffer` to a Java List. + * + * The returned Java List is backed by the provided Scala `Buffer` and any side-effects of using + * it via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Buffer` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param b The Scala `Buffer` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](b: mutable.Buffer[A]): ju.List[A] = b match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableBufferWrapper(b) + } + + /** + * Converts a Scala mutable `Seq` to a Java `List`. 
+ * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param s The Scala `Seq` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](s: mutable.Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new MutableSeqWrapper(s) + } + + /** + * Converts a Scala `Seq` to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `List` will be returned. + * + * @param s The Scala `Seq` to be converted. + * @return A Java `List` view of the argument. + */ + def asJava[A](s: Seq[A]): ju.List[A] = s match { + case null => null + case wrapper: JListWrapper[A @uc] => wrapper.underlying + case _ => new SeqWrapper(s) + } + + /** + * Converts a Scala mutable `Set` to a Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Set` will be returned. + * + * @param s The Scala mutable `Set` to be converted. + * @return A Java `Set` view of the argument. 
+ */ + def asJava[A](s: mutable.Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new MutableSetWrapper(s) + } + + /** + * Converts a Scala `Set` to a Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Set` will be returned. + * + * @param s The Scala `Set` to be converted. + * @return A Java `Set` view of the argument. + */ + def asJava[A](s: Set[A]): ju.Set[A] = s match { + case null => null + case wrapper: JSetWrapper[A @uc] => wrapper.underlying + case _ => new SetWrapper(s) + } + + /** + * Converts a Scala mutable `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Map` will be returned. + * + * @param m The Scala mutable `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + def asJava[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MutableMapWrapper(m) + } + + /** + * Converts a Scala mutable `Map` to a Java `Dictionary`. + * + * The returned Java `Dictionary` is backed by the provided Scala `Dictionary` and any + * side-effects of using it via the Java interface will be visible via the Scala interface and + * vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Dictionary` will be returned. 
+ * + * @param m The Scala `Map` to be converted. + * @return A Java `Dictionary` view of the argument. + */ + def asJavaDictionary[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = m match { + case null => null + case wrapper: JDictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new DictionaryWrapper(m) + } + + /** + * Converts a Scala `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it + * via the Java interface will be visible via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `Map` will be returned. + * + * @param m The Scala `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + def asJava[K, V](m: Map[K, V]): ju.Map[K, V] = m match { + case null => null + case wrapper: JMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new MapWrapper(m) + } + + /** + * Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. + * + * The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any + * side-effects of using it via the Java interface will be visible via the Scala interface and + * vice versa. + * + * If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of + * `asScala` then the original Java `ConcurrentMap` will be returned. + * + * @param m The Scala `concurrent.Map` to be converted. + * @return A Java `ConcurrentMap` view of the argument. 
+ */ + def asJava[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = m match { + case null => null + case wrapper: JConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new ConcurrentMapWrapper(m) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala new file mode 100644 index 000000000000..16b15c513a17 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala @@ -0,0 +1,108 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +/** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsJavaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsJava[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Iterator`, see + * [[AsJavaConverters.asJava[A](i:Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Iterator[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Enumeration`, see + * [[AsJavaConverters.asJavaEnumeration `scala.jdk.javaapi.CollectionConverters.asJavaEnumeration`]]. + */ + def asJavaEnumeration: ju.Enumeration[A] = conv.asJavaEnumeration(i) + } + + implicit class IterableHasAsJava[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Iterable`, see + * [[AsJavaConverters.asJava[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+ */ + def asJava: jl.Iterable[A] = conv.asJava(i) + + /** Converts a Scala `Iterator` to a Java `Collection`, see + * [[AsJavaConverters.asJavaCollection `scala.jdk.javaapi.CollectionConverters.asJavaCollection`]]. + */ + def asJavaCollection: ju.Collection[A] = conv.asJavaCollection(i) + } + + implicit class BufferHasAsJava[A](b: mutable.Buffer[A]) { + /** Converts a Scala `Buffer` to a Java `List`, see + * [[AsJavaConverters.asJava[A](b:scala\.collection\.mutable\.Buffer[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(b) + } + + implicit class MutableSeqHasAsJava[A](s: mutable.Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class SeqHasAsJava[A](s: Seq[A]) { + /** Converts a Scala `Seq` to a Java `List`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Seq[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.List[A] = conv.asJava(s) + } + + implicit class MutableSetHasAsJava[A](s: mutable.Set[A]) { + /** Converts a Scala `mutable.Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.mutable\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class SetHasAsJava[A](s: Set[A]) { + /** Converts a Scala `Set` to a Java `Set`, see + * [[AsJavaConverters.asJava[A](s:scala\.collection\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Set[A] = conv.asJava(s) + } + + implicit class MutableMapHasAsJava[K, V](m: mutable.Map[K, V]) { + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.mutable\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. 
+ */ + def asJava: ju.Map[K, V] = conv.asJava(m) + + /** Converts a Scala `mutable.Map` to a Java `Map`, see + * [[AsJavaConverters.asJavaDictionary `scala.jdk.javaapi.CollectionConverters.asJavaDictionary`]]. + */ + def asJavaDictionary: ju.Dictionary[K, V] = conv.asJavaDictionary(m) + } + + implicit class MapHasAsJava[K, V](m: Map[K, V]) { + /** Converts a Scala `Map` to a Java `Map`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: ju.Map[K, V] = conv.asJava(m) + } + + implicit class ConcurrentMapHasAsJava[K, V](m: concurrent.Map[K, V]) { + /** Converts a Scala `concurrent.Map` to a Java `ConcurrentMap`, see + * [[AsJavaConverters.asJava[K,V](m:scala\.collection\.concurrent\.Map[K,V])* `scala.jdk.javaapi.CollectionConverters.asJava`]]. + */ + def asJava: juc.ConcurrentMap[K, V] = conv.asJava(m) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala new file mode 100644 index 000000000000..30a28ae38147 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala @@ -0,0 +1,207 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.{unchecked => uc} + +/** Defines converter methods from Java to Scala collections. + * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. + */ +trait AsScalaConverters { + import JavaCollectionWrappers._ + + /** + * Converts a Java `Iterator` to a Scala `Iterator`. 
+ * + * The returned Scala `Iterator` is backed by the provided Java `Iterator` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterator` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterator` will be returned. + * + * @param i The Java `Iterator` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](i: ju.Iterator[A]): Iterator[A] = i match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JIteratorWrapper(i) + } + + /** + * Converts a Java `Enumeration` to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java `Enumeration` and any side-effects + * of using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Enumeration` was previously obtained from an implicit or explicit call of + * `asJavaEnumeration` then the original Scala `Iterator` will be returned. + * + * @param e The Java `Enumeration` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + def asScala[A](e: ju.Enumeration[A]): Iterator[A] = e match { + case null => null + case wrapper: IteratorWrapper[A @uc] => wrapper.underlying + case _ => new JEnumerationWrapper(e) + } + + /** + * Converts a Java `Iterable` to a Scala `Iterable`. + * + * The returned Scala `Iterable` is backed by the provided Java `Iterable` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Iterable` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Iterable` will be returned. + * + * @param i The Java `Iterable` to be converted. + * @return A Scala `Iterable` view of the argument. 
+ */ + def asScala[A](i: jl.Iterable[A]): Iterable[A] = i match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JIterableWrapper(i) + } + + /** + * Converts a Java `Collection` to a Scala `Iterable`. + * + * If the Java `Collection` was previously obtained from an implicit or explicit call of + * `asJavaCollection` then the original Scala `Iterable` will be returned. + * + * @param c The Java `Collection` to be converted. + * @return A Scala `Iterable` view of the argument. + */ + def asScala[A](c: ju.Collection[A]): Iterable[A] = c match { + case null => null + case wrapper: IterableWrapper[A @uc] => wrapper.underlying + case _ => new JCollectionWrapper(c) + } + + /** + * Converts a Java `List` to a Scala mutable `Buffer`. + * + * The returned Scala `Buffer` is backed by the provided Java `List` and any side-effects of using + * it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `List` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Buffer` will be returned. + * + * @param l The Java `List` to be converted. + * @return A Scala mutable `Buffer` view of the argument. + */ + def asScala[A](l: ju.List[A]): mutable.Buffer[A] = l match { + case null => null + case wrapper: MutableBufferWrapper[A @uc] => wrapper.underlying + case _ => new JListWrapper(l) + } + + /** + * Converts a Java `Set` to a Scala mutable `Set`. + * + * The returned Scala `Set` is backed by the provided Java `Set` and any side-effects of using it + * via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Set` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Set` will be returned. + * + * @param s The Java `Set` to be converted. + * @return A Scala mutable `Set` view of the argument. 
+ */ + def asScala[A](s: ju.Set[A]): mutable.Set[A] = s match { + case null => null + case wrapper: MutableSetWrapper[A @uc] => wrapper.underlying + case _ => new JSetWrapper(s) + } + + /** + * Converts a Java `Map` to a Scala mutable `Map`. + * + * The returned Scala `Map` is backed by the provided Java `Map` and any side-effects of using it + * via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Map` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `Map` will be returned. + * + * If the wrapped map is synchronized (e.g. from `java.util.Collections.synchronizedMap`), it is + * your responsibility to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an atomic `get` when `null` + * values may be present. + * + * @param m The Java `Map` to be converted. + * @return A Scala mutable `Map` view of the argument. + */ + def asScala[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = m match { + case null => null + case wrapper: MutableMapWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JMapWrapper(m) + } + + /** + * Converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. + * + * The returned Scala `ConcurrentMap` is backed by the provided Java `ConcurrentMap` and any + * side-effects of using it via the Scala interface will be visible via the Java interface and + * vice versa. + * + * If the Java `ConcurrentMap` was previously obtained from an implicit or explicit call of + * `asJava` then the original Scala `ConcurrentMap` will be returned. + * + * @param m The Java `ConcurrentMap` to be converted. + * @return A Scala mutable `ConcurrentMap` view of the argument. 
+ */ + def asScala[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = m match { + case null => null + case wrapper: ConcurrentMapWrapper[K @uc, V @uc] => wrapper.underlyingConcurrentMap + case _ => new JConcurrentMapWrapper(m) + } + + /** + * Converts a Java `Dictionary` to a Scala mutable `Map`. + * + * The returned Scala `Map` is backed by the provided Java `Dictionary` and any side-effects of + * using it via the Scala interface will be visible via the Java interface and vice versa. + * + * If the Java `Dictionary` was previously obtained from an implicit or explicit call of + * `asJavaDictionary` then the original Scala `Map` will be returned. + * + * @param d The Java `Dictionary` to be converted. + * @return A Scala mutable `Map` view of the argument. + */ + def asScala[K, V](d: ju.Dictionary[K, V]): mutable.Map[K, V] = d match { + case null => null + case wrapper: DictionaryWrapper[K @uc, V @uc] => wrapper.underlying + case _ => new JDictionaryWrapper(d) + } + + /** + * Converts a Java `Properties` to a Scala mutable `Map[String, String]`. + * + * The returned Scala `Map[String, String]` is backed by the provided Java `Properties` and any + * side-effects of using it via the Scala interface will be visible via the Java interface and + * vice versa. + * + * @param p The Java `Properties` to be converted. + * @return A Scala mutable `Map[String, String]` view of the argument. + */ + def asScala(p: ju.Properties): mutable.Map[String, String] = p match { + case null => null + case _ => new JPropertiesWrapper(p) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala new file mode 100644 index 000000000000..39347dde903b --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala @@ -0,0 +1,93 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +/** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */ +trait AsScalaExtensions { + import scala.jdk.javaapi.{CollectionConverters => conv} + + implicit class IteratorHasAsScala[A](i: ju.Iterator[A]) { + /** Converts a Java `Iterator` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](i:java\.util\.Iterator[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(i) + } + + implicit class EnumerationHasAsScala[A](e: ju.Enumeration[A]) { + /** Converts a Java `Enumeration` to a Scala `Iterator`, see + * [[AsScalaConverters.asScala[A](e:java\.util\.Enumeration[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterator[A] = conv.asScala(e) + } + + implicit class IterableHasAsScala[A](i: jl.Iterable[A]) { + /** Converts a Java `Iterable` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](i:Iterable[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(i) + } + + implicit class CollectionHasAsScala[A](c: ju.Collection[A]) { + /** Converts a Java `Collection` to a Scala `Iterable`, see + * [[AsScalaConverters.asScala[A](c:java\.util\.Collection[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: Iterable[A] = conv.asScala(c) + } + + implicit class ListHasAsScala[A](l: ju.List[A]) { + /** Converts a Java `List` to a Scala `Buffer`, see + * [[AsScalaConverters.asScala[A](l:java\.util\.List[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. 
+ */ + def asScala: mutable.Buffer[A] = conv.asScala(l) + } + + implicit class SetHasAsScala[A](s: ju.Set[A]) { + /** Converts a Java `Set` to a Scala `Set`, see + * [[AsScalaConverters.asScala[A](s:java\.util\.Set[A])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Set[A] = conv.asScala(s) + } + + implicit class MapHasAsScala[K, V](m: ju.Map[K, V]) { + /** Converts a Java `Map` to a Scala `Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.Map[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[K, V] = conv.asScala(m) + } + + implicit class ConcurrentMapHasAsScala[K, V](m: juc.ConcurrentMap[K, V]) { + /** Converts a Java `ConcurrentMap` to a Scala `concurrent.Map`, see + * [[AsScalaConverters.asScala[A,B](m:java\.util\.concurrent\.ConcurrentMap[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: concurrent.Map[K, V] = conv.asScala(m) + } + + implicit class DictionaryHasAsScala[K, V](d: ju.Dictionary[K, V]) { + /** Converts a Java `Dictionary` to a Scala `Map`, see + * [[AsScalaConverters.asScala[A,B](d:java\.util\.Dictionary[A,B])* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[K, V] = conv.asScala(d) + } + + implicit class PropertiesHasAsScala(i: ju.Properties) { + /** Converts a Java `Properties` to a Scala `Map`, see + * [[AsScalaConverters.asScala(p:java\.util\.Properties)* `scala.jdk.javaapi.CollectionConverters.asScala`]]. + */ + def asScala: mutable.Map[String, String] = conv.asScala(i) + } +} diff --git a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala new file mode 100644 index 000000000000..05d63f9fdeee --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala @@ -0,0 +1,181 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import scala.collection.JavaConverters._ +import scala.language.implicitConversions + +/** Defines implicit converter methods from Java to Scala collections. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +trait ToScalaImplicits { + /** Implicitly converts a Java `Iterator` to a Scala `Iterator`. + * @see [[JavaConverters.asScalaIterator]] + */ + implicit def `iterator asScala`[A](it: ju.Iterator[A]): Iterator[A] = asScalaIterator(it) + + /** Implicitly converts a Java `Enumeration` to a Scala `Iterator`. + * @see [[JavaConverters.enumerationAsScalaIterator]] + */ + implicit def `enumeration AsScalaIterator`[A](i: ju.Enumeration[A]): Iterator[A] = enumerationAsScalaIterator(i) + + /** Implicitly converts a Java `Iterable` to a Scala `Iterable`. + * @see [[JavaConverters.iterableAsScalaIterable]] + */ + implicit def `iterable AsScalaIterable`[A](i: jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable(i) + + /** Implicitly converts a Java `Collection` to an Scala `Iterable`. + * @see [[JavaConverters.collectionAsScalaIterable]] + */ + implicit def `collection AsScalaIterable`[A](i: ju.Collection[A]): Iterable[A] = collectionAsScalaIterable(i) + + /** Implicitly converts a Java `List` to a Scala mutable `Buffer`. + * @see [[JavaConverters.asScalaBuffer]] + */ + implicit def `list asScalaBuffer`[A](l: ju.List[A]): mutable.Buffer[A] = asScalaBuffer(l) + + /** Implicitly converts a Java `Set` to a Scala mutable `Set`. 
+ * @see [[JavaConverters.asScalaSet]] + */ + implicit def `set asScala`[A](s: ju.Set[A]): mutable.Set[A] = asScalaSet(s) + + /** Implicitly converts a Java `Map` to a Scala mutable `Map`. + * @see [[JavaConverters.mapAsScalaMap]] + */ + implicit def `map AsScala`[K, V](m: ju.Map[K, V]): mutable.Map[K, V] = mapAsScalaMap(m) + + /** Implicitly converts a Java `ConcurrentMap` to a Scala mutable `ConcurrentMap`. + * @see [[JavaConverters.mapAsScalaConcurrentMap]] + */ + implicit def `map AsScalaConcurrentMap`[K, V](m: juc.ConcurrentMap[K, V]): concurrent.Map[K, V] = mapAsScalaConcurrentMap(m) + + /** Implicitly converts a Java `Dictionary` to a Scala mutable `Map`. + * @see [[JavaConverters.dictionaryAsScalaMap]] + */ + implicit def `dictionary AsScalaMap`[K, V](p: ju.Dictionary[K, V]): mutable.Map[K, V] = dictionaryAsScalaMap(p) + + /** Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. + * @see [[JavaConverters.propertiesAsScalaMap]] + */ + implicit def `properties AsScalaMap`(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) +} + +/** Defines implicit conversions from Scala to Java collections. */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +trait ToJavaImplicits { + /** Implicitly converts a Scala `Iterator` to a Java `Iterator`. + * @see [[JavaConverters.asJavaIterator]] + */ + implicit def `iterator asJava`[A](it: Iterator[A]): ju.Iterator[A] = asJavaIterator(it) + + /** Implicitly converts a Scala `Iterator` to a Java `Enumeration`. + * @see [[JavaConverters.asJavaEnumeration]] + */ + implicit def `enumeration asJava`[A](it: Iterator[A]): ju.Enumeration[A] = asJavaEnumeration(it) + + /** Implicitly converts a Scala `Iterable` to a Java `Iterable`. + * @see [[JavaConverters.asJavaIterable]] + */ + implicit def `iterable asJava`[A](i: Iterable[A]): jl.Iterable[A] = asJavaIterable(i) + + /** Implicitly converts a Scala `Iterable` to an immutable Java `Collection`. 
+ * @see [[JavaConverters.asJavaCollection]] + */ + implicit def `collection asJava`[A](it: Iterable[A]): ju.Collection[A] = asJavaCollection(it) + + /** Implicitly converts a Scala mutable `Buffer` to a Java `List`. + * @see [[JavaConverters.bufferAsJavaList]] + */ + implicit def `buffer AsJavaList`[A](b: mutable.Buffer[A]): ju.List[A] = bufferAsJavaList(b) + + /** Implicitly converts a Scala mutable `Seq` to a Java `List`. + * @see [[JavaConverters.mutableSeqAsJavaList]] + */ + implicit def `mutableSeq AsJavaList`[A](seq: mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList(seq) + + /** Implicitly converts a Scala `Seq` to a Java `List`. + * @see [[JavaConverters.seqAsJavaList]] + */ + implicit def `seq AsJavaList`[A](seq: Seq[A]): ju.List[A] = seqAsJavaList(seq) + + /** Implicitly converts a Scala mutable `Set` to a Java `Set`. + * @see [[JavaConverters.mutableSetAsJavaSet]] + */ + implicit def `mutableSet AsJavaSet`[A](s: mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet(s) + + /** Implicitly converts a Scala `Set` to a Java `Set`. + * @see [[JavaConverters.setAsJavaSet]] + */ + implicit def `set AsJavaSet`[A](s: Set[A]): ju.Set[A] = setAsJavaSet(s) + + /** Implicitly converts a Scala mutable `Map` to a Java `Map`. + * @see [[JavaConverters.mutableMapAsJavaMap]] + */ + implicit def `mutableMap AsJavaMap`[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = mutableMapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * @see [[JavaConverters.asJavaDictionary]] + */ + implicit def `dictionary asJava`[K, V](m: mutable.Map[K, V]): ju.Dictionary[K, V] = asJavaDictionary(m) + + /** Implicitly converts a Scala `Map` to a Java `Map`. + * @see [[JavaConverters.mapAsJavaMap]] + */ + implicit def `map AsJavaMap`[K, V](m: Map[K, V]): ju.Map[K, V] = mapAsJavaMap(m) + + /** Implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`. 
+ * @see [[JavaConverters.mapAsJavaConcurrentMap]] + */ + implicit def `map AsJavaConcurrentMap`[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = mapAsJavaConcurrentMap(m) +} + +/** + * Convenience for miscellaneous implicit conversions from Scala to Java collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToJava extends ToJavaImplicits + +/** + * Convenience for miscellaneous implicit conversions from Java to Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues, see [[ImplicitConversions]]. + */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversionsToScala extends ToScalaImplicits + +/** + * Convenience for miscellaneous implicit conversions between Java and Scala collections API. + * + * It is recommended to use explicit conversions provided by [[collection.JavaConverters]] instead. + * Implicit conversions may cause unexpected issues. Example: + * + * {{{ + * import collection.convert.ImplicitConversions._ + * case class StringBox(s: String) + * val m = Map(StringBox("one") -> "uno") + * m.get("one") + * }}} + * + * The above example returns `null` instead of producing a type error at compile-time. The map is + * implicitly converted to a `java.util.Map` which provides a method `get(x: AnyRef)`. 
+ */ +@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") +object ImplicitConversions extends ToScalaImplicits with ToJavaImplicits diff --git a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala new file mode 100644 index 000000000000..29c3dcbac5db --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala @@ -0,0 +1,614 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package convert + +import java.util.{concurrent => juc} +import java.util.{NavigableMap} +import java.{lang => jl, util => ju} + +import scala.jdk.CollectionConverters._ +import scala.util.Try +import scala.util.chaining._ +import scala.util.control.ControlThrowable + +/** Wrappers for exposing Scala collections as Java collections and vice-versa */ +@SerialVersionUID(3L) +// not private[convert] because `WeakHashMap` uses JMapWrapper +private[collection] object JavaCollectionWrappers extends Serializable { + @SerialVersionUID(3L) + class IteratorWrapper[A](val underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next() + def hasMoreElements = underlying.hasNext + def nextElement() = underlying.next() + override def remove() = throw new UnsupportedOperationException + } + + @SerialVersionUID(3L) + class JIteratorWrapper[A](val underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasNext + def next() = underlying.next + } + + @SerialVersionUID(3L) + class JEnumerationWrapper[A](val underlying: 
ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] with Serializable { + def hasNext = underlying.hasMoreElements + def next() = underlying.nextElement + } + + trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { + val underlying: Iterable[A] + def size = underlying.size + override def iterator = new IteratorWrapper(underlying.iterator) + override def isEmpty = underlying.isEmpty + } + + @SerialVersionUID(3L) + class IterableWrapper[A](val underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] with Serializable { + import scala.runtime.Statics._ + override def equals(other: Any): Boolean = + other match { + case other: IterableWrapper[_] => underlying.equals(other.underlying) + case _ => false + } + override def hashCode = finalizeHash(mix(mix(0xcafebabe, "IterableWrapper".hashCode), anyHash(underlying)), 1) + } + + @SerialVersionUID(3L) + class JIterableWrapper[A](val underlying: jl.Iterable[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def iterableFactory = mutable.ArrayBuffer + override def isEmpty: Boolean = !underlying.iterator().hasNext + } + + @SerialVersionUID(3L) + class JCollectionWrapper[A](val underlying: ju.Collection[A]) + extends AbstractIterable[A] + with StrictOptimizedIterableOps[A, Iterable, Iterable[A]] + with Serializable { + def iterator = underlying.iterator.asScala + override def size = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterableFactory = mutable.ArrayBuffer + } + + @SerialVersionUID(3L) + class SeqWrapper[A](val underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + } + + @SerialVersionUID(3L) + class MutableSeqWrapper[A](val underlying: mutable.Seq[A]) extends 
ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { + val p = underlying(i) + underlying(i) = elem + p + } + } + + @SerialVersionUID(3L) + class MutableBufferWrapper[A](val underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] with Serializable { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } + override def add(elem: A) = { underlying += elem; true } + override def remove(i: Int) = underlying remove i + } + + @SerialVersionUID(3L) + class JListWrapper[A](val underlying: ju.List[A]) + extends mutable.AbstractBuffer[A] + with SeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with StrictOptimizedSeqOps[A, mutable.Buffer, mutable.Buffer[A]] + with IterableFactoryDefaults[A, mutable.Buffer] + with Serializable { + def length = underlying.size + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def isEmpty = underlying.isEmpty + override def iterator: Iterator[A] = underlying.iterator.asScala + def apply(i: Int) = underlying.get(i) + def update(i: Int, elem: A) = underlying.set(i, elem) + def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } + def addOne(elem: A): this.type = { underlying add elem; this } + def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) + def insertAll(i: Int, elems: IterableOnce[A]) = { + val ins = underlying.subList(0, i) + elems.iterator.foreach(ins.add(_)) + } + def remove(i: Int) = underlying.remove(i) + def clear() = underlying.clear() + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. 
+ override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + def remove(from: Int, n: Int): Unit = underlying.subList(from, from+n).clear() + override def iterableFactory = mutable.ArrayBuffer + override def subtractOne(elem: A): this.type = { underlying.remove(elem.asInstanceOf[AnyRef]); this } + } + + @SerialVersionUID(3L) + class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self => + // Note various overrides to avoid performance gotchas. + override def contains(o: Object): Boolean = { + try { underlying.contains(o.asInstanceOf[A]) } + catch { case cce: ClassCastException => false } + } + override def isEmpty = underlying.isEmpty + def size = underlying.size + def iterator = new ju.Iterator[A] { + val ui = underlying.iterator + var prev: Option[A] = None + def hasNext = ui.hasNext + def next = { val e = ui.next(); prev = Some(e); e } + override def remove() = prev match { + case Some(e) => + underlying match { + case ms: mutable.Set[a] => + ms remove e + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + + @SerialVersionUID(3L) + class MutableSetWrapper[A](val underlying: mutable.Set[A]) extends SetWrapper[A](underlying) with Serializable { + override def add(elem: A) = { + val sz = underlying.size + underlying += elem + sz < underlying.size + } + override def remove(elem: AnyRef) = + try underlying.remove(elem.asInstanceOf[A]) + catch { case ex: ClassCastException => false } + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + class JSetWrapper[A](val underlying: ju.Set[A]) + extends mutable.AbstractSet[A] + with mutable.SetOps[A, mutable.Set, mutable.Set[A]] + with 
StrictOptimizedSetOps[A, mutable.Set, mutable.Set[A]] + with Serializable { + + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + def iterator: Iterator[A] = underlying.iterator.asScala + + def contains(elem: A): Boolean = underlying.contains(elem) + + def addOne(elem: A): this.type = { underlying add elem; this } + def subtractOne(elem: A): this.type = { underlying remove elem; this } + + override def remove(elem: A): Boolean = underlying remove elem + + override def clear(): Unit = { + underlying.clear() + } + + override def empty: mutable.Set[A] = new JSetWrapper(new ju.HashSet[A]) + + // Note: Clone cannot just call underlying.clone because in Java, only specific collections + // expose clone methods. Generically, they're protected. + override def clone(): mutable.Set[A] = new JSetWrapper[A](new ju.LinkedHashSet[A](underlying)) + + override def iterableFactory: IterableFactory[mutable.Set] = mutable.HashSet + + override def filterInPlace(p: A => Boolean): this.type = { + if (underlying.size() > 0) underlying.removeIf(!p(_)) + this + } + } + + @SerialVersionUID(3L) + class MapWrapper[K, V](underlying: Map[K, V]) extends ju.AbstractMap[K, V] with Serializable { self => + override def size = underlying.size + + override def get(key: AnyRef): V = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def entrySet: ju.Set[ju.Map.Entry[K, V]] = new ju.AbstractSet[ju.Map.Entry[K, V]] { + def size = self.size + + def iterator = new ju.Iterator[ju.Map.Entry[K, V]] { + val ui = underlying.iterator + var prev : Option[K] = None + + def hasNext = ui.hasNext + + def next() = { + val (k, v) = ui.next() + prev = Some(k) + new ju.Map.Entry[K, V] { + def getKey = k + def getValue = v + def setValue(v1 : V) = 
self.put(k, v1) + + // It's important that this implementation conform to the contract + // specified in the javadocs of java.util.Map.Entry.hashCode + // + // See https://github.com/scala/bug/issues/10663 + override def hashCode = { + (if (k == null) 0 else k.hashCode()) ^ + (if (v == null) 0 else v.hashCode()) + } + + override def equals(other: Any) = other match { + case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue + case _ => false + } + } + } + + override def remove(): Unit = { + prev match { + case Some(k) => + underlying match { + case mm: mutable.Map[a, _] => + mm -= k + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + } + + override def containsKey(key: AnyRef): Boolean = try { + // Note: Subclass of collection.Map with specific key type may redirect generic + // contains to specific contains, which will throw a ClassCastException if the + // wrong type is passed. This is why we need a type cast to A inside a try/catch. 
+ underlying.contains(key.asInstanceOf[K]) + } catch { + case ex: ClassCastException => false + } + } + + @SerialVersionUID(3L) + class MutableMapWrapper[K, V](val underlying: mutable.Map[K, V]) extends MapWrapper[K, V](underlying) { + override def put(k: K, v: V) = underlying.put(k, v) match { + case Some(v1) => v1 + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef): V = try { + underlying remove k.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + + override def clear() = underlying.clear() + } + + @SerialVersionUID(3L) + abstract class AbstractJMapWrapper[K, V] + extends mutable.AbstractMap[K, V] + with JMapWrapperLike[K, V, mutable.Map, mutable.Map[K, V]] with Serializable + + trait JMapWrapperLike[K, V, +CC[X, Y] <: mutable.MapOps[X, Y, CC, _], +C <: mutable.MapOps[K, V, CC, C]] + extends mutable.MapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), mutable.Iterable, C] { + + def underlying: ju.Map[K, V] + + override def size = underlying.size + + // support Some(null) if currently bound to null + def get(k: K) = { + val v = underlying.get(k) + if (v != null) + Some(v) + else if (underlying.containsKey(k)) + Some(null.asInstanceOf[V]) + else + None + } + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => update(key, null.asInstanceOf[V]); null.asInstanceOf[V] + case v => v + } + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + // support Some(null) if currently bound to null + override def put(k: K, v: V): Option[V] = + if (v == null) { + val present = underlying.containsKey(k) + val result = underlying.put(k, v) + if (present) Some(result) else None + } else { + var result: Option[V] = None + def recompute(k0: K, 
v0: V): V = v.tap(_ => + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + ) + underlying.compute(k, recompute) + result + } + + override def update(k: K, v: V): Unit = underlying.put(k, v) + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => update(key, null.asInstanceOf[V]); Some(null.asInstanceOf[V]) + } + } + + // support Some(null) if currently bound to null + override def remove(k: K): Option[V] = { + var result: Option[V] = None + def recompute(k0: K, v0: V): V = { + if (v0 != null) result = Some(v0) + else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) + null.asInstanceOf[V] + } + underlying.compute(k, recompute) + result + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { val e = ui.next(); (e.getKey, e.getValue) } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val i = underlying.entrySet().iterator() + while (i.hasNext) { + val entry = i.next() + f(entry.getKey, entry.getValue) + } + } + + override def clear() = underlying.clear() + + } + + /** Wraps a Java map as a Scala one. If the map is to support concurrent access, + * use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized + * (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility + * to wrap all non-atomic operations with `underlying.synchronized`. + * This includes `get`, as `java.util.Map`'s API does not allow for an + * atomic `get` when `null` values may be present. 
+ */ + @SerialVersionUID(3L) + class JMapWrapper[K, V](val underlying : ju.Map[K, V]) + extends AbstractJMapWrapper[K, V] with Serializable { + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JMapWrapper(new ju.HashMap[K, V]) + } + + @SerialVersionUID(3L) + class ConcurrentMapWrapper[K, V](underlying: concurrent.Map[K, V]) extends MutableMapWrapper[K, V](underlying) with juc.ConcurrentMap[K, V] { + + def underlyingConcurrentMap: concurrent.Map[K, V] = underlying + + override def putIfAbsent(k: K, v: V) = underlying.putIfAbsent(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def remove(k: AnyRef, v: AnyRef) = try { + underlying.remove(k.asInstanceOf[K], v.asInstanceOf[V]) + } catch { + case ex: ClassCastException => + false + } + + override def replace(k: K, v: V): V = underlying.replace(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + + override def replace(k: K, oldval: V, newval: V) = underlying.replace(k, oldval, newval) + } + + /** Wraps a concurrent Java map as a Scala one. Single-element concurrent + * access is supported; multi-element operations such as maps and filters + * are not guaranteed to be atomic. 
+ */ + @SerialVersionUID(3L) + class JConcurrentMapWrapper[K, V](val underlying: juc.ConcurrentMap[K, V]) + extends AbstractJMapWrapper[K, V] + with concurrent.Map[K, V] { + + override def get(k: K) = Option(underlying get k) + + override def getOrElseUpdate(key: K, op: => V): V = + underlying.computeIfAbsent(key, _ => op) match { + case null => super/*[concurrent.Map]*/.getOrElseUpdate(key, op) + case v => v + } + + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[K, V]) + + def putIfAbsent(k: K, v: V): Option[V] = Option(underlying.putIfAbsent(k, v)) + + def remove(k: K, v: V): Boolean = underlying.remove(k, v) + + def replace(k: K, v: V): Option[V] = Option(underlying.replace(k, v)) + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = underlying.replace(k, oldvalue, newvalue) + + override def lastOption: Option[(K, V)] = + underlying match { + case nav: NavigableMap[K @unchecked, V @unchecked] => Option(nav.lastEntry).map(e => (e.getKey, e.getValue)) + case _ if isEmpty => None + case _ => Try(last).toOption + } + + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + def remap(k: K, v: V): V = + remappingFunction(Option(v)) match { + case Some(null) => throw PutNull // see scala/scala#10129 + case Some(x) => x + case None => null.asInstanceOf[V] + } + try Option(underlying.compute(key, remap)) + catch { + case PutNull => super/*[concurrent.Map]*/.updateWith(key)(remappingFunction) + } + } + } + + @SerialVersionUID(3L) + class DictionaryWrapper[K, V](val underlying: mutable.Map[K, V]) extends ju.Dictionary[K, V] with Serializable { + def size: Int = underlying.size + def isEmpty: Boolean = underlying.isEmpty + def keys: ju.Enumeration[K] = underlying.keysIterator.asJavaEnumeration + def elements: ju.Enumeration[V] = underlying.valuesIterator.asJavaEnumeration + def 
get(key: AnyRef) = try { + underlying get key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + def put(key: K, value: V): V = underlying.put(key, value) match { + case Some(v) => v + case None => null.asInstanceOf[V] + } + override def remove(key: AnyRef) = try { + underlying remove key.asInstanceOf[K] match { + case None => null.asInstanceOf[V] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[V] + } + } + + @SerialVersionUID(3L) + class JDictionaryWrapper[K, V](val underlying: ju.Dictionary[K, V]) extends mutable.AbstractMap[K, V] with Serializable { + override def size: Int = underlying.size + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = if (underlying.isEmpty) 0 else super.knownSize + + def get(k: K) = Option(underlying get k) + + def addOne(kv: (K, V)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: K): this.type = { underlying remove key; this } + + override def put(k: K, v: V): Option[V] = Option(underlying.put(k, v)) + + override def update(k: K, v: V): Unit = { underlying.put(k, v) } + + override def remove(k: K): Option[V] = Option(underlying remove k) + def iterator = underlying.keys.asScala map (k => (k, underlying get k)) + + override def clear() = iterator.foreach(entry => underlying.remove(entry._1)) + + override def mapFactory = mutable.HashMap + } + + @SerialVersionUID(3L) + class JPropertiesWrapper(underlying: ju.Properties) + extends mutable.AbstractMap[String, String] + with mutable.MapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedMapOps[String, String, mutable.Map, mutable.Map[String, String]] + with StrictOptimizedIterableOps[(String, String), mutable.Iterable, mutable.Map[String, String]] + with Serializable { + + override def size = underlying.size + override def isEmpty: Boolean = 
underlying.isEmpty + override def knownSize: Int = size + def get(k: String) = { + val v = underlying get k + if (v != null) Some(v.asInstanceOf[String]) else None + } + + def addOne(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } + def subtractOne(key: String): this.type = { underlying remove key; this } + + override def put(k: String, v: String): Option[String] = { + val r = underlying.put(k, v) + if (r != null) Some(r.asInstanceOf[String]) else None + } + + override def update(k: String, v: String): Unit = { underlying.put(k, v) } + + override def remove(k: String): Option[String] = { + val r = underlying remove k + if (r != null) Some(r.asInstanceOf[String]) else None + } + + def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { + val e = ui.next() + (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) + } + } + + override def clear() = underlying.clear() + + override def empty = new JPropertiesWrapper(new ju.Properties) + + def getProperty(key: String) = underlying.getProperty(key) + + def getProperty(key: String, defaultValue: String) = + underlying.getProperty(key, defaultValue) + + def setProperty(key: String, value: String) = + underlying.setProperty(key, value) + + override def mapFactory = mutable.HashMap + } + + /** Thrown when certain Map operations attempt to put a null value. */ + private val PutNull = new ControlThrowable {} +} diff --git a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala new file mode 100644 index 000000000000..cdeea62fb5ed --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala @@ -0,0 +1,480 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert + +import java.util.Spliterator +import java.util.stream._ +import java.{lang => jl} + +import scala.annotation.implicitNotFound +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} +import scala.jdk.CollectionConverters._ +import scala.jdk._ + +/** Defines extension methods to create Java Streams for Scala collections, available through + * [[scala.jdk.javaapi.StreamConverters]]. + */ +trait StreamExtensions { + // collections + + implicit class IterableHasSeqStream[A](cc: IterableOnce[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = + s.fromStepper(cc.stepper, par = false) + } + + // Not `CC[X] <: IterableOnce[X]`, but `C` with an extra constraint, to support non-parametric classes like IntAccumulator + implicit class IterableNonGenericHasParStream[A, C <: IterableOnce[_]](c: C)(implicit ev: C <:< IterableOnce[A]) { + private type IterableOnceWithEfficientStepper = IterableOnce[A] { + def stepper[S <: Stepper[_]](implicit shape : StepperShape[A, S]) : S with EfficientSplit + } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this collection. If the + * collection contains primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[A, S, St], + st: StepperShape[A, St], + @implicitNotFound("`parStream` can only be called on collections where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: C <:< IterableOnceWithEfficientStepper): S = + s.fromStepper(ev(c).stepper, par = true) + } + + // maps + + implicit class MapHasSeqKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[K, S, St], st: StepperShape[K, St]): S = + s.fromStepper(cc.keyStepper, par = false) + + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[V, S, St], st: StepperShape[V, St]): S = + s.fromStepper(cc.valueStepper, par = false) + + // The asJavaSeqStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a sequential [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. 
+ */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[(K, V), S, St], st: StepperShape[(K, V), St]): S = + s.fromStepper(cc.stepper, par = false) + } + + + implicit class MapHasParKeyValueStream[K, V, CC[X, Y] <: collection.MapOps[X, Y, collection.Map, _]](cc: CC[K, V]) { + private type MapOpsWithEfficientKeyStepper = collection.MapOps[K, V, collection.Map, _] { def keyStepper[S <: Stepper[_]](implicit shape : StepperShape[K, S]) : S with EfficientSplit } + private type MapOpsWithEfficientValueStepper = collection.MapOps[K, V, collection.Map, _] { def valueStepper[S <: Stepper[_]](implicit shape : StepperShape[V, S]) : S with EfficientSplit } + private type MapOpsWithEfficientStepper = collection.MapOps[K, V, collection.Map, _] { def stepper[S <: Stepper[_]](implicit shape : StepperShape[(K, V), S]) : S with EfficientSplit } + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the keys of this map. If + * the keys are primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaParKeyStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[K, S, St], + st: StepperShape[K, St], + @implicitNotFound("parKeyStream can only be called on maps where `keyStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientKeyStepper): S = + s.fromStepper(cc.keyStepper, par = true) + + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the values of this map. If + * the values are primitives, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParValueStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[V, S, St], + st: StepperShape[V, St], + @implicitNotFound("parValueStream can only be called on maps where `valueStepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientValueStepper): S = + s.fromStepper(cc.valueStepper, par = true) + + // The asJavaParStream extension method for IterableOnce doesn't apply because its `CC` takes a single type parameter, whereas the one here takes two + /** Create a parallel [[java.util.stream.Stream Java Stream]] for the `(key, value)` pairs of + * this map. + */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit + s: StreamShape[(K, V), S, St], + st: StepperShape[(K, V), St], + @implicitNotFound("parStream can only be called on maps where `stepper` returns a `Stepper with EfficientSplit`") + isEfficient: CC[K, V] <:< MapOpsWithEfficientStepper): S = + s.fromStepper(cc.stepper, par = true) + } + + // steppers + + implicit class StepperHasSeqStream[A](stepper: Stepper[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). + */ + def asJavaSeqStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] => st.seqUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = false) + } + } + + implicit class StepperHasParStream[A](stepper: Stepper[A] with EfficientSplit) { + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this stepper. If the + * stepper yields primitive values, a corresponding specialized Stream is returned (e.g., + * [[java.util.stream.IntStream `IntStream`]]). 
+ */ + def asJavaParStream[S <: BaseStream[_, _], St <: Stepper[_]](implicit s: StreamShape[A, S, St], st: StepperShape[A, St]): S = { + val sStepper = stepper match { + case as: AnyStepper[A] with EfficientSplit => st.parUnbox(as) + case _ => stepper.asInstanceOf[St] + } + s.fromStepper(sStepper, par = true) + } + } + + // arrays + // uses the JDK array spliterators (`DoubleArraySpliterator`). users can also call + // `array.stepper.seqStream`, which then uses the Scala steppers (`DoubleArrayStepper`). the + // steppers are also available on byte/short/char/float arrays (`WidenedByteArrayStepper`), + // JDK spliterators only for double/int/long/reference. + + implicit class DoubleArrayHasSeqParStream(a: Array[Double]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = asJavaSeqStream.parallel + } + + implicit class IntArrayHasSeqParStream(a: Array[Int]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = asJavaSeqStream.parallel + } + + implicit class LongArrayHasSeqParStream(a: Array[Long]) { + /** Create a sequential [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaSeqStream: LongStream = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.LongStream Java LongStream]] for this array. */ + def asJavaParStream: LongStream = asJavaSeqStream.parallel + } + + implicit class AnyArrayHasSeqParStream[A <: AnyRef](a: Array[A]) { + /** Create a sequential [[java.util.stream.Stream Java Stream]] for this array. 
*/ + def asJavaSeqStream: Stream[A] = java.util.Arrays.stream(a) + /** Create a parallel [[java.util.stream.Stream Java Stream]] for this array. */ + def asJavaParStream: Stream[A] = asJavaSeqStream.parallel + } + + implicit class ByteArrayHasSeqParStream(a: Array[Byte]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class ShortArrayHasSeqParStream(a: Array[Short]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class CharArrayHasSeqParStream(a: Array[Char]) { + /** Create a sequential [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaSeqStream: IntStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.IntStream Java IntStream]] for this array. */ + def asJavaParStream: IntStream = a.stepper.asJavaParStream + } + + implicit class FloatArrayHasSeqParStream(a: Array[Float]) { + /** Create a sequential [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaSeqStream: DoubleStream = a.stepper.asJavaSeqStream + /** Create a parallel [[java.util.stream.DoubleStream Java DoubleStream]] for this array. */ + def asJavaParStream: DoubleStream = a.stepper.asJavaParStream + } + + + + // strings + + implicit class StringHasSeqParStream(s: String) { + /** + * A sequential stream on the characters of a string, same as [[asJavaSeqCharStream]]. See also + * [[asJavaSeqCodePointStream]]. 
+ */ + def asJavaSeqStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ false) + /** + * A parallel stream on the characters of a string, same as [[asJavaParCharStream]]. See also + * [[asJavaParCodePointStream]]. + */ + def asJavaParStream: IntStream = StreamSupport.intStream(s.stepper.spliterator, /* par = */ true) + + /** A sequential stream on the characters of a string. See also [[asJavaSeqCodePointStream]]. */ + def asJavaSeqCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ false) + /** A parallel stream on the characters of a string. See also [[asJavaParCodePointStream]]. */ + def asJavaParCharStream: IntStream = StreamSupport.intStream(s.charStepper.spliterator, /* par = */ true) + + /** A sequential stream on the code points of a string. See also [[asJavaSeqCharStream]]. */ + def asJavaSeqCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ false) + /** A parallel stream on the code points of a string. See also [[asJavaParCharStream]]. */ + def asJavaParCodePointStream: IntStream = StreamSupport.intStream(s.codePointStepper.spliterator, /* par = */ true) + } + + // toScala for streams + + implicit class StreamHasToScala[A](stream: Stream[A]) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts streams of boxed integers, longs or + * doubles are converted to the primitive accumulators ([[scala.jdk.IntAccumulator]], etc.). + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. 
Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[A, C1])(implicit info: AccumulatorFactoryInfo[A, C1]): C1 = { + + def anyAcc = stream.collect(AnyAccumulator.supplier[A], AnyAccumulator.adder[A], AnyAccumulator.merger[A]) + if (info.companion == AnyAccumulator) anyAcc.asInstanceOf[C1] + else if (info.companion == IntAccumulator) stream.asInstanceOf[Stream[Int]].collect(IntAccumulator.supplier, IntAccumulator.boxedAdder, IntAccumulator.merger).asInstanceOf[C1] + else if (info.companion == LongAccumulator) stream.asInstanceOf[Stream[Long]].collect(LongAccumulator.supplier, LongAccumulator.boxedAdder, LongAccumulator.merger).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) stream.asInstanceOf[Stream[Double]].collect(DoubleAccumulator.supplier, DoubleAccumulator.boxedAdder, DoubleAccumulator.merger).asInstanceOf[C1] + else if (stream.isParallel) anyAcc.to(factory) + else factory.fromSpecific(stream.iterator.asScala) + } + + /** Convert a generic Java Stream wrapping a primitive type to a corresponding primitive + * Stream. + */ + def asJavaPrimitiveStream[S](implicit unboxer: StreamUnboxer[A, S]): S = unboxer(stream) + } + + implicit class IntStreamHasToScala(stream: IntStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel streams to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `IntStream` to a primitive + * [[scala.jdk.IntAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. 
The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. + */ + def toScala[C1](factory: collection.Factory[Int, C1])(implicit info: AccumulatorFactoryInfo[Int, C1]): C1 = { + def intAcc = stream.collect(IntAccumulator.supplier, IntAccumulator.adder, IntAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Int], AnyAccumulator.unboxedIntAdder, AnyAccumulator.merger[Int]).asInstanceOf[C1] + else if (info.companion == IntAccumulator) intAcc.asInstanceOf[C1] + else if (stream.isParallel) intAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Int]].asScala) + } + } + + implicit class LongStreamHasToScala(stream: LongStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `LongStream` to a primitive + * [[scala.jdk.LongAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[C1](factory: collection.Factory[Long, C1])(implicit info: AccumulatorFactoryInfo[Long, C1]): C1 = { + def longAcc = stream.collect(LongAccumulator.supplier, LongAccumulator.adder, LongAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Long], AnyAccumulator.unboxedLongAdder, AnyAccumulator.merger[Long]).asInstanceOf[C1] + else if (info.companion == LongAccumulator) longAcc.asInstanceOf[C1] + else if (stream.isParallel) longAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Long]].asScala) + } + } + + implicit class DoubleStreamHasToScala(stream: DoubleStream) { + /** + * Copy the elements of this stream into a Scala collection. + * + * Converting a parallel stream to an [[scala.jdk.Accumulator]] using `stream.toScala(Accumulator)` + * builds the result in parallel. + * + * A `toScala(Accumulator)` call automatically converts the `DoubleStream` to a primitive + * [[scala.jdk.DoubleAccumulator]]. + * + * When converting a parallel stream to a different Scala collection, the stream is first + * converted into an [[scala.jdk.Accumulator]], which supports parallel building. The accumulator is + * then converted to the target collection. Note that the stream is processed eagerly while + * building the accumulator, even if the target collection is lazy. + * + * Sequential streams are directly converted to the target collection. If the target collection + * is lazy, the conversion is lazy as well. 
+ */ + def toScala[C1](factory: collection.Factory[Double, C1])(implicit info: AccumulatorFactoryInfo[Double, C1]): C1 = { + def doubleAcc = stream.collect(DoubleAccumulator.supplier, DoubleAccumulator.adder, DoubleAccumulator.merger) + if (info.companion == AnyAccumulator) stream.collect(AnyAccumulator.supplier[Double], AnyAccumulator.unboxedDoubleAdder, AnyAccumulator.merger[Double]).asInstanceOf[C1] + else if (info.companion == DoubleAccumulator) doubleAcc.asInstanceOf[C1] + else if (stream.isParallel) doubleAcc.to(factory) + else factory.fromSpecific(stream.iterator.asInstanceOf[java.util.Iterator[Double]].asScala) + } + } +} + +object StreamExtensions { + /** An implicit StreamShape instance connects element types with the corresponding specialized + * Stream and Stepper types. This is used in `asJavaStream` extension methods to create + * generic or primitive streams according to the element type. + */ + sealed trait StreamShape[T, S <: BaseStream[_, _], St <: Stepper[_]] { + final def fromStepper(st: St, par: Boolean): S = mkStream(st, par) + protected def mkStream(st: St, par: Boolean): S + } + + object StreamShape extends StreamShapeLowPriority1 { + // primitive + implicit val intStreamShape : StreamShape[Int , IntStream , IntStepper] = mkIntStreamShape[Int] + implicit val longStreamShape : StreamShape[Long , LongStream , LongStepper] = mkLongStreamShape[Long] + implicit val doubleStreamShape: StreamShape[Double, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Double] + + // widening + implicit val byteStreamShape : StreamShape[Byte , IntStream , IntStepper] = mkIntStreamShape[Byte] + implicit val shortStreamShape: StreamShape[Short, IntStream , IntStepper] = mkIntStreamShape[Short] + implicit val charStreamShape : StreamShape[Char , IntStream , IntStepper] = mkIntStreamShape[Char] + implicit val floatStreamShape: StreamShape[Float, DoubleStream, DoubleStepper] = mkDoubleStreamShape[Float] + + // boxed java primitives + + implicit val 
jIntegerStreamShape : StreamShape[jl.Integer , IntStream , IntStepper ] = mkIntStreamShape[jl.Integer] + implicit val jLongStreamShape : StreamShape[jl.Long , LongStream , LongStepper ] = mkLongStreamShape[jl.Long] + implicit val jDoubleStreamShape : StreamShape[jl.Double , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Double] + implicit val jByteStreamShape : StreamShape[jl.Byte , IntStream , IntStepper ] = mkIntStreamShape[jl.Byte] + implicit val jShortStreamShape : StreamShape[jl.Short , IntStream , IntStepper ] = mkIntStreamShape[jl.Short] + implicit val jCharacterStreamShape : StreamShape[jl.Character, IntStream , IntStepper ] = mkIntStreamShape[jl.Character] + implicit val jFloatStreamShape : StreamShape[jl.Float , DoubleStream, DoubleStepper] = mkDoubleStreamShape[jl.Float] + + private def mkIntStreamShape[T]: StreamShape[T, IntStream, IntStepper] = new StreamShape[T, IntStream, IntStepper] { + protected def mkStream(st: IntStepper, par: Boolean): IntStream = StreamSupport.intStream(st.spliterator, par) + } + + private def mkLongStreamShape[T]: StreamShape[T, LongStream, LongStepper] = new StreamShape[T, LongStream, LongStepper] { + protected def mkStream(st: LongStepper, par: Boolean): LongStream = StreamSupport.longStream(st.spliterator, par) + } + + private def mkDoubleStreamShape[T]: StreamShape[T, DoubleStream, DoubleStepper] = new StreamShape[T, DoubleStream, DoubleStepper] { + protected def mkStream(st: DoubleStepper, par: Boolean): DoubleStream = StreamSupport.doubleStream(st.spliterator, par) + } + } + + trait StreamShapeLowPriority1 { + // reference + implicit def anyStreamShape[T]: StreamShape[T, Stream[T], Stepper[T]] = anyStreamShapePrototype.asInstanceOf[StreamShape[T, Stream[T], Stepper[T]]] + + private[this] val anyStreamShapePrototype: StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] = new StreamShape[AnyRef, Stream[AnyRef], Stepper[AnyRef]] { + def mkStream(s: Stepper[AnyRef], par: Boolean): Stream[AnyRef] = 
StreamSupport.stream(s.spliterator.asInstanceOf[Spliterator[AnyRef]], par) + } + } + + /** Connects a stream element type `A` to the corresponding, potentially specialized, Stream type. + * Used in the `stream.asJavaPrimitiveStream` extension method. + */ + sealed trait StreamUnboxer[A, S] { + def apply(s: Stream[A]): S + } + object StreamUnboxer { + implicit val intStreamUnboxer: StreamUnboxer[Int, IntStream] = new StreamUnboxer[Int, IntStream] { + def apply(s: Stream[Int]): IntStream = s.mapToInt(x => x) + } + implicit val javaIntegerStreamUnboxer: StreamUnboxer[jl.Integer, IntStream] = intStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Integer, IntStream]] + + implicit val longStreamUnboxer: StreamUnboxer[Long, LongStream] = new StreamUnboxer[Long, LongStream] { + def apply(s: Stream[Long]): LongStream = s.mapToLong(x => x) + } + implicit val javaLongStreamUnboxer: StreamUnboxer[jl.Long, LongStream] = longStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Long, LongStream]] + + implicit val doubleStreamUnboxer: StreamUnboxer[Double, DoubleStream] = new StreamUnboxer[Double, DoubleStream] { + def apply(s: Stream[Double]): DoubleStream = s.mapToDouble(x => x) + } + implicit val javaDoubleStreamUnboxer: StreamUnboxer[jl.Double, DoubleStream] = doubleStreamUnboxer.asInstanceOf[StreamUnboxer[jl.Double, DoubleStream]] + } + + + + /** An implicit `AccumulatorFactoryInfo` connects primitive element types to the corresponding + * specialized [[scala.jdk.Accumulator]] factory. This is used in the `stream.toScala` extension methods + * to ensure collecting a primitive stream into a primitive accumulator does not box. + * + * When converting to a collection other than `Accumulator`, the generic + * `noAccumulatorFactoryInfo` is passed. 
+ */ + trait AccumulatorFactoryInfo[A, C] { + val companion: AnyRef + } + trait LowPriorityAccumulatorFactoryInfo { + implicit def noAccumulatorFactoryInfo[A, C]: AccumulatorFactoryInfo[A, C] = noAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, C]] + private val noAccumulatorFactoryInfoPrototype: AccumulatorFactoryInfo[AnyRef, AnyRef] = new AccumulatorFactoryInfo[AnyRef, AnyRef] { + val companion: AnyRef = null + } + } + object AccumulatorFactoryInfo extends LowPriorityAccumulatorFactoryInfo { + implicit def anyAccumulatorFactoryInfo[A]: AccumulatorFactoryInfo[A, AnyAccumulator[A]] = anyAccumulatorFactoryInfoPrototype.asInstanceOf[AccumulatorFactoryInfo[A, AnyAccumulator[A]]] + + private object anyAccumulatorFactoryInfoPrototype extends AccumulatorFactoryInfo[AnyRef, AnyAccumulator[AnyRef]] { + val companion: AnyRef = AnyAccumulator + } + + implicit val intAccumulatorFactoryInfo: AccumulatorFactoryInfo[Int, IntAccumulator] = new AccumulatorFactoryInfo[Int, IntAccumulator] { + val companion: AnyRef = IntAccumulator + } + + implicit val longAccumulatorFactoryInfo: AccumulatorFactoryInfo[Long, LongAccumulator] = new AccumulatorFactoryInfo[Long, LongAccumulator] { + val companion: AnyRef = LongAccumulator + } + + implicit val doubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[Double, DoubleAccumulator] = new AccumulatorFactoryInfo[Double, DoubleAccumulator] { + val companion: AnyRef = DoubleAccumulator + } + + implicit val jIntegerAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Integer, IntAccumulator] = intAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Integer, IntAccumulator]] + implicit val jLongAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Long, IntAccumulator] = longAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Long, IntAccumulator]] + implicit val jDoubleAccumulatorFactoryInfo: AccumulatorFactoryInfo[jl.Double, IntAccumulator] = doubleAccumulatorFactoryInfo.asInstanceOf[AccumulatorFactoryInfo[jl.Double, 
IntAccumulator]] + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala new file mode 100644 index 000000000000..845ecb4a606d --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala @@ -0,0 +1,79 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): ObjectArrayStepper[A] = new ObjectArrayStepper[A](underlying, i0, half) +} + +private[collection] class BoxedBooleanArrayStepper(underlying: Array[Boolean], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[Boolean], BoxedBooleanArrayStepper](_i0, _iN) + with AnyStepper[Boolean] { + def nextStep(): Boolean = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): BoxedBooleanArrayStepper = new BoxedBooleanArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedByteArrayStepper(underlying: Array[Byte], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedByteArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedByteArrayStepper = new WidenedByteArrayStepper(underlying, i0, half) +} + 
+private[collection] class WidenedCharArrayStepper(underlying: Array[Char], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedCharArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedCharArrayStepper = new WidenedCharArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedShortArrayStepper(underlying: Array[Short], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, WidenedShortArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedShortArrayStepper = new WidenedShortArrayStepper(underlying, i0, half) +} + +private[collection] class WidenedFloatArrayStepper(underlying: Array[Float], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, WidenedFloatArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): WidenedFloatArrayStepper = new WidenedFloatArrayStepper(underlying, i0, half) +} + +private[collection] class DoubleArrayStepper(underlying: Array[Double], _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleArrayStepper](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleArrayStepper = new DoubleArrayStepper(underlying, i0, half) +} + +private[collection] class IntArrayStepper(underlying: Array[Int], _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntArrayStepper](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntArrayStepper = new 
IntArrayStepper(underlying, i0, half) +} + +private[collection] class LongArrayStepper(underlying: Array[Long], _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongArrayStepper](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongArrayStepper = new LongArrayStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala new file mode 100644 index 000000000000..7c795aea5391 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala @@ -0,0 +1,248 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection._ + + +private[collection] object BinaryTreeStepper { + val emptyStack = new Array[AnyRef](0) +} + + +/** A generic stepper that can traverse ordered binary trees. + * The tree is assumed to have all the stuff on the left first, then the root, then everything on the right. + * + * Splits occur at the root of whatever has not yet been traversed (the substepper steps up to but + * does not include the root). + * + * The stepper maintains an internal stack, not relying on the tree traversal to be reversible. Trees with + * nodes that maintain a parent pointer may be traversed slightly faster without a stack, but splitting is + * more awkward. 
+ * + * Algorithmically, this class implements a simple state machine that unrolls the left-leaning links in + * a binary tree onto a stack. At all times, the machine should be in one of these states: + * 1. Empty: `myCurrent` is `null` and `index` is `-1`. `stack` should also be `Array.empty` then. + * 2. Ready: `myCurrent` is not `null` and contains the next `A` to be extracted + * 3. Pending: `myCurrent` is `null` and `stack(index)` contains the next node to visit + * + * Subclasses should allow this class to do all the work of maintaining state; `next` should simply + * reduce `maxLength` by one, and consume `myCurrent` and set it to `null` if `hasNext` is true. + */ +private[collection] abstract class BinaryTreeStepperBase[A, T >: Null <: AnyRef, Sub >: Null, Semi <: Sub with BinaryTreeStepperBase[A, T, _, _]]( + protected var maxLength: Int, protected var myCurrent: T, protected var stack: Array[AnyRef], protected var index: Int, + protected val left: T => T, protected val right: T => T +) +extends EfficientSplit { + /** Unrolls a subtree onto the stack starting from a particular node, returning + * the last node found. This final node is _not_ placed on the stack, and + * may have things to its right. + */ + @tailrec protected final def unroll(from: T): T = { + val l = left(from) + if (l eq null) from + else { + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = from + unroll(l) + } + } + + /** Takes a subtree whose left side, if any, has already been visited, and unrolls + * the right side of the tree onto the stack, thereby detaching that node of + * the subtree from the stack entirely (so it is ready to use). It returns + * the node that is being detached. Note that the node must _not_ already be + * on the stack. 
+ */ + protected final def detach(node: T): node.type = { + val r = right(node) + if (r ne null) { + val last = unroll(r) + if (index+1 >= stack.length) stack = java.util.Arrays.copyOf(stack, 4 + stack.length*2) + index += 1 + stack(index) = last + } + node + } + + /** Given an empty state and the root of a new tree, initialize the tree properly + * to be in an (appropriate) ready state. Will do all sorts of wrong stuff if the + * tree is not already empty. + * + * Right now overwrites everything so could allow reuse, but isn't used for it. + */ + private[impl] final def initialize(root: T, size: Int): Unit = + if (root eq null) { + maxLength = 0 + myCurrent = null + stack = BinaryTreeStepper.emptyStack + index = -1 + } + else { + maxLength = size + index = -1 + myCurrent = detach(unroll(root)) + } + + protected def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): Semi + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = if (hasStep) maxLength else 0 + + def hasStep: Boolean = (myCurrent ne null) || (maxLength > 0 && { + if (index < 0) { maxLength = 0; stack = BinaryTreeStepper.emptyStack; false } + else { + val ans = stack(index).asInstanceOf[T] + index -= 1 + myCurrent = detach(ans) + true + } + }) + + /** Splits the tree at the root by giving everything unrolled on the stack to a new stepper, + * detaching the root, and leaving the right-hand side of the root unrolled. + * + * If the tree is empty or only has one element left, it returns `null` instead of splitting. 
+ */ + def trySplit(): Sub = + if (!hasStep || index < 0) null + else { + val root = stack(0).asInstanceOf[T] + val leftStack = + if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) + else BinaryTreeStepper.emptyStack + val leftIndex = index - 1 + val leftCurrent = myCurrent + var leftMax = maxLength + index = -1 + detach(root) + myCurrent = root + leftMax -= 2+index + maxLength -= 2+leftIndex + semiclone(leftMax, leftCurrent, leftStack, leftIndex) + } +} + + +private[collection] final class AnyBinaryTreeStepper[A, T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => A +) +extends BinaryTreeStepperBase[A, T, AnyStepper[A], AnyBinaryTreeStepper[A, T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): AnyBinaryTreeStepper[A, T] = + new AnyBinaryTreeStepper[A, T](maxL, myC, stk, ix, left, right, extract) +} +private[collection] object AnyBinaryTreeStepper { + def from[A, T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => A): AnyBinaryTreeStepper[A, T] = { + val ans = new AnyBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class DoubleBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Double +) +extends BinaryTreeStepperBase[Double, T, DoubleStepper, DoubleBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + 
ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): DoubleBinaryTreeStepper[T] = + new DoubleBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object DoubleBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Double): DoubleBinaryTreeStepper[T] = { + val ans = new DoubleBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + +private[collection] final class IntBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Int +) +extends BinaryTreeStepperBase[Int, T, IntStepper, IntBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): IntBinaryTreeStepper[T] = + new IntBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object IntBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Int): IntBinaryTreeStepper[T] = { + val ans = new IntBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + + +private[collection] final class LongBinaryTreeStepper[T >: Null <: AnyRef]( + _maxLength: Int, _myCurrent: T, _stack: Array[AnyRef], _index: Int, _left: T => T, _right: T => T, protected val extract: T => Long +) +extends BinaryTreeStepperBase[Long, T, LongStepper, LongBinaryTreeStepper[T]](_maxLength, _myCurrent, _stack, _index, _left, _right) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = 
extract(myCurrent) + myCurrent = null + maxLength -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(maxL: Int, myC: T, stk: Array[AnyRef], ix: Int): LongBinaryTreeStepper[T] = + new LongBinaryTreeStepper[T](maxL, myC, stk, ix, left, right, extract) +} +private [collection] object LongBinaryTreeStepper { + def from[T >: Null <: AnyRef](maxLength: Int, root: T, left: T => T, right: T => T, extract: T => Long): LongBinaryTreeStepper[T] = { + val ans = new LongBinaryTreeStepper(0, null, BinaryTreeStepper.emptyStack, -1, left, right, extract) + ans.initialize(root, maxLength) + ans + } +} + + diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala new file mode 100644 index 000000000000..574e7fd50f1c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -0,0 +1,118 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{BitSetOps, IntStepper, Stepper} + + +private[collection] final class BitSetStepper( + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, + _i0: Int, _iN: Int, + private var cacheIndex: Int +) +extends InOrderStepperBase[IntStepper, BitSetStepper](_i0, _iN) +with IntStepper { + import BitSetOps.{WordLength, LogWL} + + // When `found` is set, `i0` is an element that exists + protected var found: Boolean = false + + @annotation.tailrec + protected def findNext(): Boolean = + if (i0 >= iN) false + else { + val ix = i0 >> LogWL + if (ix == cacheIndex || ix == cacheIndex+1) { + val i = scanLong(if (ix == cacheIndex) cache0 else cache1, i0 & (WordLength - 1)) + if (i >= 0) { + i0 = (i0 & ~(WordLength - 1)) | i + found = (i0 < iN) + found + } + else { + i0 = (i0 & ~(WordLength - 1)) + WordLength + findNext() + } + } + else if (underlying eq null) { + i0 = iN + found = false + found + } + else { + cacheIndex = ix + cache0 = underlying.word(cacheIndex) + cache1 = if ((iN - 1) >> LogWL == ix) -1L else underlying.word(cacheIndex+1) + findNext() + } + } + + def semiclone(half: Int): BitSetStepper = + if (underlying == null) { + val ans = new BitSetStepper(null, cache0, cache1, i0, half, cacheIndex) + ans.found = found + i0 = half + found = false + ans + } + else { + // Set up new stepper + val ixNewN = (half - 1) >> LogWL + val ans = + new BitSetStepper(if (ixNewN <= cacheIndex + 1) null else underlying, cache0, cache1, i0, half, cacheIndex) + if (found) ans.found = true + + // Advance old stepper to breakpoint + val ixOld0 = half >> LogWL + if (ixOld0 > cacheIndex + 1) { + cache0 = underlying.word(ixOld0) + cache1 = if (((iN - 1) >> LogWL) == ixOld0) -1L else underlying.word(ixOld0+1) + cacheIndex = ixOld0 + i0 = half + found = false + } + + // Return new stepper + ans + } + + @annotation.tailrec + 
private[this] def scanLong(bits: Long, from: Int): Int = + if (from >= WordLength) -1 + else if ((bits & (1L << from)) != 0) from + else scanLong(bits, from + 1) + + def nextStep(): Int = + if (found || findNext()) { + found = false + val ans = i0 + i0 += 1 + ans + } + else Stepper.throwNSEE() +} + +private[collection] object BitSetStepper { + def from(bs: scala.collection.BitSetOps[_]): IntStepper with EfficientSplit = + new BitSetStepper( + if (bs.nwords <= 2) null else bs, + if (bs.nwords <= 0) -1L else bs.word(0), + if (bs.nwords <= 1) -1L else bs.word(1), + 0, + bs.nwords * BitSetOps.WordLength, + 0 + ) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala new file mode 100644 index 000000000000..466e6c440f45 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -0,0 +1,245 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ +import scala.collection.immutable.Node + +/** A stepper that is a slightly elaborated version of the ChampBaseIterator; + * the main difference is that it knows when it should stop instead of running + * to the end of all trees. + */ +private[collection] abstract class ChampStepperBase[ + A, T <: Node[T], Sub >: Null, Semi <: Sub with ChampStepperBase[A, T, _, _] +](protected var maxSize: Int) +extends EfficientSplit { + import Node.MaxDepth + + // Much of this code is identical to ChampBaseIterator. If you change that, look here too! 
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private var currentStackLevel: Int = -1 + private var nodeCursorsAndLengths: Array[Int] = _ + private var nodes: Array[T] = _ + + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + def initRoot(rootNode: T): Unit = { + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + false + } + + def characteristics: Int = 0 + + def estimateSize: Long = if (hasStep) maxSize else 0L + + def semiclone(): Semi + + final def hasStep: Boolean = maxSize > 0 && { + val ans = (currentValueCursor < currentValueLength) || searchNextValueNode() + if (!ans) maxSize = 0 + ans + } + + final def trySplit(): Sub = + if (!hasStep) null + else { + var fork = 0 + while (fork <= currentStackLevel && nodeCursorsAndLengths(2*fork) >= nodeCursorsAndLengths(2*fork + 1)) fork += 1 + if (fork > currentStackLevel && currentValueCursor > currentValueLength -2) null + else { + val semi = semiclone() + semi.maxSize = maxSize + semi.currentValueCursor = currentValueCursor + semi.currentValueNode = currentValueNode + if (fork > currentStackLevel) { + // Just need to finish the current node + semi.currentStackLevel = -1 + val i = (currentValueCursor + currentValueLength) >>> 1 + semi.currentValueLength = i + currentValueCursor = i + } + else { + // Need (at least some of) the full stack, so make an identical copy + semi.nodeCursorsAndLengths = java.util.Arrays.copyOf(nodeCursorsAndLengths, nodeCursorsAndLengths.length) + semi.nodes = java.util.Arrays.copyOf(nodes.asInstanceOf[Array[Node[T]]], nodes.length).asInstanceOf[Array[T]] + semi.currentStackLevel = currentStackLevel + semi.currentValueLength = currentValueLength + + // Split the top level of the stack where there's still something to split + 
// Could make this more efficient by duplicating code from searchNextValueNode + // instead of setting up for it to run normally. But splits tend to be rare, + // so it's not critically important. + // + // Note that this split can be kind of uneven; if we knew how many child nodes there + // were we could do better. + val i = (nodeCursorsAndLengths(2*fork) + nodeCursorsAndLengths(2*fork + 1)) >>> 1 + semi.nodeCursorsAndLengths(2*fork + 1) = i + var j = currentStackLevel + while (j > fork) { + nodeCursorsAndLengths(2*j) = nodeCursorsAndLengths(2*j + 1) + j -= 1 + } + nodeCursorsAndLengths(2*fork) = i + searchNextValueNode() + } + semi + } + } +} + + +private[collection] final class AnyChampStepper[A, T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => A) +extends ChampStepperBase[A, T, AnyStepper[A], AnyChampStepper[A, T]](_maxSize) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): AnyChampStepper[A, T] = new AnyChampStepper[A, T](0, extract) +} +private[collection] object AnyChampStepper { + def from[A, T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => A): AnyChampStepper[A, T] = { + val ans = new AnyChampStepper[A, T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class DoubleChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Double) +extends ChampStepperBase[Double, T, DoubleStepper, DoubleChampStepper[T]](_maxSize) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): DoubleChampStepper[T] = new DoubleChampStepper[T](0, extract) +} +private[collection] object DoubleChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, 
root: T, extract: (T, Int) => Double): DoubleChampStepper[T] = { + val ans = new DoubleChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class IntChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Int) +extends ChampStepperBase[Int, T, IntStepper, IntChampStepper[T]](_maxSize) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): IntChampStepper[T] = new IntChampStepper[T](0, extract) +} +private[collection] object IntChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Int): IntChampStepper[T] = { + val ans = new IntChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} + +private[collection] final class LongChampStepper[T >: Null <: Node[T]](_maxSize: Int, protected val extract: (T, Int) => Long) +extends ChampStepperBase[Long, T, LongStepper, LongChampStepper[T]](_maxSize) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(currentValueNode, currentValueCursor) + currentValueCursor += 1 + maxSize -= 1 + ans + } + else Stepper.throwNSEE() + + def semiclone(): LongChampStepper[T] = new LongChampStepper[T](0, extract) +} +private[collection] object LongChampStepper { + def from[T >: Null <: Node[T]](maxSize: Int, root: T, extract: (T, Int) => Long): LongChampStepper[T] = { + val ans = new LongChampStepper[T](maxSize, extract) + ans.initRoot(root) + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala new file mode 100644 index 000000000000..2d1f88d02930 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL 
and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit + +/** Abstracts all the generic operations of stepping over a collection + * that has an indexable ordering but may have gaps. + * + * For collections that are guaranteed to not have gaps, use `IndexedStepperBase` instead. + */ +private[convert] abstract class InOrderStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) +extends EfficientSplit { + /** Set `true` if the element at `i0` is known to be there. `false` if either not known or is a gap. + */ + protected def found: Boolean + + /** Advance `i0` over any gaps, updating internal state so `found` is correct at the new position. + * Returns the new value of `found`. + */ + protected def findNext(): Boolean + + protected def semiclone(half: Int): Semi + + final def hasStep: Boolean = found || findNext() + + def characteristics: Int = Spliterator.ORDERED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala new file mode 100644 index 000000000000..136ac8d2dcc3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -0,0 +1,44 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) + extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) + with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): AnyIndexedSeqStepper[A] = new AnyIndexedSeqStepper[A](underlying, i0, half) +} + +private[collection] class DoubleIndexedSeqStepper[CC <: collection.IndexedSeqOps[Double, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[DoubleStepper, DoubleIndexedSeqStepper[CC]](_i0, _iN) + with DoubleStepper { + def nextStep(): Double = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): DoubleIndexedSeqStepper[CC] = new DoubleIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class IntIndexedSeqStepper[CC <: collection.IndexedSeqOps[Int, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[IntStepper, IntIndexedSeqStepper[CC]](_i0, _iN) + with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): IntIndexedSeqStepper[CC] = new IntIndexedSeqStepper[CC](underlying, i0, half) +} + +private[collection] class LongIndexedSeqStepper[CC <: collection.IndexedSeqOps[Long, AnyConstr, _]](underlying: CC, _i0: Int, _iN: Int) + extends IndexedStepperBase[LongStepper, LongIndexedSeqStepper[CC]](_i0, _iN) + with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + protected def semiclone(half: Int): LongIndexedSeqStepper[CC] = new 
LongIndexedSeqStepper[CC](underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala new file mode 100644 index 000000000000..4670ccc56bfc --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit + +/** Abstracts all the generic operations of stepping over an indexable collection */ +private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) + extends EfficientSplit { + protected def semiclone(half: Int): Semi + + def hasStep: Boolean = i0 < iN + + def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + def estimateSize: Long = iN - i0 + + def trySplit(): Sub = { + if (iN-1 > i0) { + val half = (i0+iN) >>> 1 + val ans = semiclone(half) + i0 = half + ans + } + else null + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala new file mode 100644 index 000000000000..68b318c04c9c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -0,0 +1,129 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.util.Spliterator + +import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} +import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} + +private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) + extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) + with AnyStepper[A] { + protected def semiclone(): AnyIteratorStepper[A] = new AnyIteratorStepper(null) + + def nextStep(): A = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): AnyStepper[A] = if (proxied ne null) proxied.trySplit() else { + val acc = new AnyAccumulator[A] + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class DoubleIteratorStepper(_underlying: Iterator[Double]) + extends IteratorStepperBase[Double, DoubleStepper, DoubleIteratorStepper](_underlying) + with DoubleStepper { + protected def semiclone(): DoubleIteratorStepper = new DoubleIteratorStepper(null) + + def nextStep(): Double = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): DoubleStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new DoubleAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = 
acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class IntIteratorStepper(_underlying: Iterator[Int]) + extends IteratorStepperBase[Int, IntStepper, IntIteratorStepper](_underlying) + with IntStepper { + protected def semiclone(): IntIteratorStepper = new IntIteratorStepper(null) + + def nextStep(): Int = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): IntStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new IntAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +private[collection] class LongIteratorStepper(_underlying: Iterator[Long]) + extends IteratorStepperBase[Long, LongStepper, LongIteratorStepper](_underlying) + with LongStepper { + protected def semiclone(): LongIteratorStepper = new LongIteratorStepper(null) + + def nextStep(): Long = if (proxied ne null) proxied.nextStep() else underlying.next() + + def trySplit(): LongStepper = if (proxied ne null) proxied.trySplit() else { + val acc = new LongAccumulator + var i = 0 + val n = nextChunkSize & 0xFFFFFFFC + while (i < n && underlying.hasNext) { acc += underlying.next(); i += 1 } + if (i < n || !underlying.hasNext) { + proxied = acc.stepper + proxied.trySplit() + } + else { + val ans = semiclone() + ans.proxied = acc.stepper + nextChunkSize = if ((nextChunkSize&3) == 3) { if (n < 0x40000000) n*2 else n } else nextChunkSize + 1 + ans + } + } +} + +/** Common functionality for Steppers that step through an Iterator, caching the results as needed when a split is requested. 
*/ +private[convert] abstract class IteratorStepperBase[A, SP >: Null <: Stepper[A], Semi <: SP](final protected val underlying: Iterator[A]) { + final protected var nextChunkSize = 16 + final protected var proxied: SP = null + protected def semiclone(): Semi // Must initialize with null iterator! + def characteristics: Int = if (proxied ne null) Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED else Spliterator.ORDERED + def estimateSize: Long = if (proxied ne null) proxied.estimateSize else Long.MaxValue + def hasStep: Boolean = if (proxied ne null) proxied.hasStep else underlying.hasNext +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala new file mode 100644 index 000000000000..89e17bbf467c --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} +import scala.collection.immutable.NumericRange + +private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) +extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new AnyNumericRangeStepper[A](underlying, i0, half) +} + +private[collection] class IntNumericRangeStepper(underlying: NumericRange[Int], _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, IntNumericRangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new IntNumericRangeStepper(underlying, i0, half) +} + +private[collection] class LongNumericRangeStepper(underlying: NumericRange[Long], _i0: Int, _iN: Int) +extends IndexedStepperBase[LongStepper, LongNumericRangeStepper](_i0, _iN) +with LongStepper { + def nextStep(): Long = if (hasStep) { val j = i0; i0 += 1; underlying(j) } else Stepper.throwNSEE() + def semiclone(half: Int) = new LongNumericRangeStepper(underlying, i0, half) +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala new file mode 100644 index 000000000000..282ddb4aa2ad --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala @@ -0,0 +1,40 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.{IntStepper, Stepper} + +/** Implements Stepper on an integer Range. You don't actually need the Range to do this, + * so only the relevant parts are included. Because the arguments are protected, they are + * not error-checked; `Range` is required to provide valid arguments. + */ +private[collection] final class RangeStepper(protected var myNext: Int, myStep: Int, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, RangeStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = myNext + myNext += myStep + i0 += 1 + ans + } + else Stepper.throwNSEE() + protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) + override def trySplit(): IntStepper = { + val old_i0 = i0 + val ans = super.trySplit() + myNext += (i0 - old_i0) * myStep + ans + } +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala new file mode 100644 index 000000000000..8990f462b4fd --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala @@ -0,0 +1,58 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import java.lang.Character.{charCount, isLowSurrogate} +import java.util.Spliterator + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{IntStepper, Stepper} + +/** Implements `Stepper` on a `String` where you step through chars packed into `Int`. 
+ */ +private[collection] final class CharStringStepper(underlying: String, _i0: Int, _iN: Int) +extends IndexedStepperBase[IntStepper, CharStringStepper](_i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { val j = i0; i0 += 1; underlying.charAt(j) } + else Stepper.throwNSEE() + + def semiclone(half: Int): CharStringStepper = new CharStringStepper(underlying, i0, half) +} + +/** Implements `Stepper` on a `String` where you step through code points. + */ +private[collection] final class CodePointStringStepper(underlying: String, private var i0: Int, private var iN: Int) +extends IntStepper with EfficientSplit { + def characteristics: Int = Spliterator.IMMUTABLE | Spliterator.NONNULL | Spliterator.ORDERED + def estimateSize: Long = iN - i0 + def hasStep: Boolean = i0 < iN + def nextStep(): Int = { + if (hasStep) { + val cp = underlying.codePointAt(i0) + i0 += charCount(cp) + cp + } + else Stepper.throwNSEE() + } + def trySplit(): CodePointStringStepper = + if (iN - 3 > i0) { + var half = (i0 + iN) >>> 1 + if (isLowSurrogate(underlying.charAt(half))) half -= 1 + val ans = new CodePointStringStepper(underlying, i0, half) + i0 = half + ans + } + else null +} diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala new file mode 100644 index 000000000000..cac041a5237b --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.convert +package impl + +import scala.collection.Stepper.EfficientSplit +import scala.collection._ + +private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]]( + protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int +) +extends EfficientSplit { + // Always holds table(i0); if `null` it is time to switch to the next element + protected var myCurrent: I = if (i0 < iN) table(i0) else null + + // Only call this when `myCurrent` is null (meaning we need to advance) + @annotation.tailrec + protected final def findNextCurrent(): Boolean = + if (i0 < iN) { + i0 += 1 + if (i0 >= iN) false + else { + myCurrent = table(i0) + if (myCurrent eq null) findNextCurrent() + else true + } + } + else false + + protected def semiclone(half: Int): Semi + + def characteristics: Int = 0 + + def estimateSize: Long = if (!hasStep) { maxLength = 0; 0 } else maxLength + + def hasStep: Boolean = (myCurrent ne null) || findNextCurrent() + + def trySplit(): Sub = { + if (iN-1 > i0 && maxLength > 0) { + val half = (i0 + iN) >>> 1 + val ans = semiclone(half) + ans.myCurrent = myCurrent + myCurrent = table(half) + var inLeft = if (ans.myCurrent ne null) 1 else 0 + var inRight = if (myCurrent ne null) 1 else 0 + if (iN - i0 < 32) { + var i = i0+1 + while (i < half && (table(i) ne null)) { i += 1; inLeft += 1 } + i = half+1 + while (i < iN && (table(i) ne null)) { i += 1; inRight += 1 } + } + maxLength -= inLeft + ans.maxLength -= inRight + i0 = half + ans + } + else null + } +} + + +private[collection] final class AnyTableStepper[A, I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => A, _i0: Int, _iN: Int +) +extends TableStepperBase[A, I, AnyStepper[A], AnyTableStepper[A, I]](_maxLength, _table, _i0, _iN) +with AnyStepper[A] { + def nextStep(): A = + if (hasStep) { + val ans = extract(myCurrent) + 
myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): AnyTableStepper[A, I] = new AnyTableStepper[A, I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class DoubleTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Double, _i0: Int, _iN: Int +) +extends TableStepperBase[Double, I, DoubleStepper, DoubleTableStepper[I]](_maxLength, _table, _i0, _iN) +with DoubleStepper { + def nextStep(): Double = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): DoubleTableStepper[I] = new DoubleTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class IntTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Int, _i0: Int, _iN: Int +) +extends TableStepperBase[Int, I, IntStepper, IntTableStepper[I]](_maxLength, _table, _i0, _iN) +with IntStepper { + def nextStep(): Int = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): IntTableStepper[I] = new IntTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + + +private[collection] final class LongTableStepper[I >: Null <: AnyRef]( + _maxLength: Int, _table: Array[I], iterate: I => I, extract: I => Long, _i0: Int, _iN: Int +) +extends TableStepperBase[Long, I, LongStepper, LongTableStepper[I]](_maxLength, _table, _i0, _iN) +with LongStepper { + def nextStep(): Long = + if (hasStep) { + val ans = extract(myCurrent) + myCurrent = iterate(myCurrent) + ans + } + else Stepper.throwNSEE() + + def semiclone(half: Int): LongTableStepper[I] = new LongTableStepper[I](maxLength, table, iterate, extract, i0, half) +} + diff --git a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala 
b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala new file mode 100644 index 000000000000..332ec65d85fd --- /dev/null +++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala @@ -0,0 +1,131 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.convert +package impl + +import scala.collection._ + +private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( + _i0: Int, + _iN: Int, + protected val displayN: Int, + protected val trunk: Array[AnyRef] +) +extends IndexedStepperBase[Sub, Semi](_i0, _iN) { + protected var index: Int = 32 // Force an advanceData on the first element + protected var leaves: Array[AnyRef] = null + protected var index1: Int = 32 // Force advanceData to defer to initTo on the first element + protected var twigs: Array[AnyRef] = null + + protected final def advanceData(iX: Int): Unit = { + index1 += 1 + if (index1 >= 32) initTo(iX) + else { + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = 0 + } + } + protected final def initTo(iX: Int): Unit = displayN match { + case 0 => + leaves = trunk + index = iX + case 1 => + twigs = trunk + index1 = iX >>> 5 + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + case _ => + var n = displayN + var dataN = trunk + while (n > 2) { + dataN = dataN((iX >> (5*n)) & 0x1F).asInstanceOf[Array[AnyRef]] + n -= 1 + } + twigs = dataN((iX >>> 10) & 0x1F).asInstanceOf[Array[AnyRef]] + index1 = (iX >> 5) & 0x1F + leaves = twigs(index1).asInstanceOf[Array[AnyRef]] + index = iX & 0x1F + } +} + +private[collection] class AnyVectorStepper[A](_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[AnyStepper[A], 
AnyVectorStepper[A]](_i0, _iN, _displayN, _trunk) +with AnyStepper[A] { + def nextStep(): A = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[A] + } else Stepper.throwNSEE() + def semiclone(half: Int): AnyVectorStepper[A] = { + val ans = new AnyVectorStepper[A](i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class DoubleVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[DoubleStepper, DoubleVectorStepper](_i0, _iN, _displayN, _trunk) +with DoubleStepper { + def nextStep(): Double = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Double] + } else Stepper.throwNSEE() + def semiclone(half: Int): DoubleVectorStepper = { + val ans = new DoubleVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[IntStepper, IntVectorStepper](_i0, _iN, _displayN, _trunk) +with IntStepper { + def nextStep(): Int = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Int] + } else Stepper.throwNSEE() + def semiclone(half: Int): IntVectorStepper = { + val ans = new IntVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 = half + ans + } +} + +private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) +extends VectorStepperBase[LongStepper, LongVectorStepper](_i0, _iN, _displayN, _trunk) +with LongStepper { + def nextStep(): Long = if (hasStep) { + index += 1 + if (index >= 32) advanceData(i0) + i0 += 1 + leaves(index).asInstanceOf[Long] + } else Stepper.throwNSEE() + def semiclone(half: Int): LongVectorStepper = { + val ans = new LongVectorStepper(i0, half, displayN, trunk) + index = 32 + index1 = 32 + i0 
= half + ans + } +} diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala new file mode 100644 index 000000000000..4c64dec9dc1f --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + + +/** Some bit operations. + * + * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for + * an explanation of unsignedCompare. + */ +private[collection] object BitOperations { + trait Int { + type Int = scala.Int + def zero(i: Int, mask: Int) = (i & mask) == 0 + def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix + def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0) + def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1) + def complement(i: Int) = (-1) ^ i + def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0) + def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j) + } + object Int extends Int + + trait Long { + type Long = scala.Long + def zero(i: Long, mask: Long) = (i & mask) == 0L + def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask) + def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix + def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L) + def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1) + def complement(i: Long) = (-1L) ^ i + def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L) + 
def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep + def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j) + } + object Long extends Long +} diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..69b4b3d96e61 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. 
+ */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} + +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. 
+ */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala new file mode 100644 index 000000000000..bf2eab6bb2a6 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala @@ -0,0 +1,164 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +/** A trait which can be used to avoid code duplication when defining extension + * methods that should be applicable both to existing Scala collections (i.e., + * types extending `Iterable`) as well as other (potentially user-defined) + * types that could be converted to a Scala collection type. This trait + * makes it possible to treat Scala collections and types that can be implicitly + * converted to a collection type uniformly. 
For example, one can provide + * extension methods that work both on collection types and on `String`s (`String`s + * do not extend `Iterable`, but can be converted to `Iterable`) + * + * `IsIterable` provides three members: + * + * 1. type member `A`, which represents the element type of the target `Iterable[A]` + * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s elements type + * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`. + * + * ===Usage=== + * + * One must provide `IsIterable` as an implicit parameter type of an implicit + * conversion. Its usage is shown below. Our objective in the following example + * is to provide a generic extension method `mapReduce` to any type that extends + * or can be converted to `Iterable`. In our example, this includes + * `String`. + * + * {{{ + * import scala.collection.{Iterable, IterableOps} + * import scala.collection.generic.IsIterable + * + * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) { + * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = { + * val iter = it(coll).iterator + * var res = mapper(iter.next()) + * while (iter.hasNext) + * res = reducer(res, mapper(iter.next())) + * res + * } + * } + * + * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] = + * new ExtensionMethods(coll, it) + * + * // See it in action! + * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12 + * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59 + *}}} + * + * Here, we begin by creating a class `ExtensionMethods` which contains our + * `mapReduce` extension method. 
+ * + * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where + * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`. + * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to + * call the `iterator` method on it. + * The remainder of the implementation is straightforward. + * + * The `withExtensions` implicit conversion makes the `mapReduce` operation available + * on any type `Repr` for which there exists an implicit `IsIterable[Repr]` instance. + * Note how we keep track of the precise type of the implicit `it` argument by using the + * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that + * so that the information carried by the type members `A` and `C` of the `it` argument + * is not lost. + * + * When the `mapReduce` method is called on some type of which it is not + * a member, implicit search is triggered. Because implicit conversion + * `withExtensions` is generic, it will be applied as long as an implicit + * value of type `IsIterable[Repr]` can be found. Given that the + * `IsIterable` companion object contains implicit members that return values of type + * `IsIterable`, this requirement is typically satisfied, and the chain + * of interactions described in the previous paragraph is set into action. + * (See the `IsIterable` companion object, which contains a precise + * specification of the available implicits.) + * + * ''Note'': Currently, it's not possible to combine the implicit conversion and + * the class with the extension methods into an implicit class due to + * limitations of type inference. + * + * ===Implementing `IsIterable` for New Types=== + * + * One must simply provide an implicit value of type `IsIterable` + * specific to the new type, or an implicit conversion which returns an + * instance of `IsIterable` specific to the new type. 
+ * + * Below is an example of an implementation of the `IsIterable` trait + * where the `Repr` type is `Range`. + * + *{{{ + * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } = + * new IsIterable[Range] { + * type A = Int + * type C = IndexedSeq[Int] + * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll + * } + *}}} + * + * (Note that in practice the `IsIterable[Range]` instance is already provided by + * the standard library, and it is defined as an `IsSeq[Range]` instance) + */ +trait IsIterable[Repr] extends IsIterableOnce[Repr] { + + /** The type returned by transformation operations that preserve the same elements + * type (e.g. `filter`, `take`). + * + * In practice, this type is often `Repr` itself, excepted in the case + * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`. + */ + type C + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */ + def apply(coll: Repr): IterableOps[A, Iterable, C] + +} + +object IsIterable extends IsIterableLowPriority { + + // Straightforward case: IterableOps subclasses + implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } = + new IsIterable[CC0[A0]] { + type A = A0 + type C = CC0[A0] + def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll + } + + // The `BitSet` type can not be unified with the `CC0` parameter of + // the above definition because it does not take a type parameter. 
+ // Hence the need for a separate case: + implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } = + new IsIterable[C0] { + type A = Int + type C = C0 + def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll + } + +} + +trait IsIterableLowPriority { + + // Makes `IsSeq` instances visible in `IsIterable` companion + implicit def isSeqLikeIsIterable[Repr](implicit + isSeqLike: IsSeq[Repr] + ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike + + // Makes `IsMap` instances visible in `IsIterable` companion + implicit def isMapLikeIsIterable[Repr](implicit + isMapLike: IsMap[Repr] + ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala new file mode 100644 index 000000000000..7d7293037bd4 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -0,0 +1,71 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package generic + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `IterableOnce[A]`. + * + * This type enables simple enrichment of `IterableOnce`s with extension + * methods which can make full use of the mechanics of the Scala collections + * framework in their implementation. 
+ * + * Example usage, + * {{{ + * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) { + * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = { + * val b = bf.newBuilder(coll) + * for(e <- it(coll).iterator) f(e) foreach (b +=) + * b.result() + * } + * } + * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] = + * new FilterMapImpl(coll, it) + * + * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None) + * // == List(2, 4) + * }}} + */ +trait IsIterableOnce[Repr] { + + /** The type of elements we can traverse over (e.g. `Int`). */ + type A + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + val conversion: Repr => IterableOnce[A] = apply(_) + + /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */ + def apply(coll: Repr): IterableOnce[A] + +} + +object IsIterableOnce extends IsIterableOnceLowPriority { + + // Straightforward case: IterableOnce subclasses + implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } = + new IsIterableOnce[CC0[A0]] { + type A = A0 + def apply(coll: CC0[A0]): IterableOnce[A0] = coll + } + +} + +trait IsIterableOnceLowPriority { + + // Makes `IsIterable` instance visible in `IsIterableOnce` companion + implicit def isIterableLikeIsIterableOnce[Repr](implicit + isIterableLike: IsIterable[Repr] + ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala new file mode 100644 index 000000000000..19f75cf7bced --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import IsMap.Tupled +import scala.collection.immutable.{IntMap, LongMap} + +/** + * Type class witnessing that a collection type `Repr` + * has keys of type `K`, values of type `V` and has a conversion to + * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`. + * + * This type enables simple enrichment of `Map`s with extension methods. + * + * @see [[scala.collection.generic.IsIterable]] + * @tparam Repr Collection type (e.g. `Map[Int, String]`) + */ +trait IsMap[Repr] extends IsIterable[Repr] { + + /** The type of keys */ + type K + + /** The type of values */ + type V + + type A = (K, V) + + /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]` + * + * @note The third type parameter of the returned `MapOps` value is + * still `Iterable` (and not `Map`) because `MapView[K, V]` only + * extends `MapOps[K, V, View, View[A]]`. + */ + override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C] + +} + +object IsMap { + + /** Convenient type level function that takes a unary type constructor `F[_]` + * and returns a binary type constructor that tuples its parameters and passes + * them to `F`. + * + * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, +Y] = F[(X, Y)] })#Ap`. 
+ */ + type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] } + + // Map collections + implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = CC0[K0, V0] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c + } + + // MapView + implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } = + new IsMap[CC0[K0, V0]] { + type K = K0 + type V = V0 + type C = View[(K, V)] + def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c + } + + // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition + implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } = + new IsMap[mutable.AnyRefMap[K0, V0]] { + type K = K0 + type V = V0 + type C = mutable.AnyRefMap[K0, V0] + def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c + } + + // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters + implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } = + new IsMap[IntMap[V0]] { + type K = Int + type V = V0 + type C = IntMap[V0] + def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c + } + + // LongMap is in a similar situation as IntMap + implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } = + new IsMap[LongMap[V0]] { + type K = Long + type V = V0 + type C = LongMap[V0] + def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c + } + + // mutable.LongMap is in a similar situation as LongMap and IntMap + implicit def mutableLongMapIsMap[V0]: 
IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } = + new IsMap[mutable.LongMap[V0]] { + type K = Long + type V = V0 + type C = mutable.LongMap[V0] + def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c + } + + +} diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala new file mode 100644 index 000000000000..69ea27d087d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -0,0 +1,114 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package generic + +import scala.reflect.ClassTag + +/** Type class witnessing that a collection representation type `Repr` has + * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for + * some types `A` and `C`. + * + * This type enables simple enrichment of `Seq`s with extension methods which + * can make full use of the mechanics of the Scala collections framework in + * their implementation. + * + * @see [[scala.collection.generic.IsIterable]] + */ +trait IsSeq[Repr] extends IsIterable[Repr] { + + @deprecated("'conversion' is now a method named 'apply'", "2.13.0") + override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_) + + /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]` + * + * @note The second type parameter of the returned `SeqOps` value is + * still `Iterable` (and not `Seq`) because `SeqView[A]` only + * extends `SeqOps[A, View, View[A]]`. 
+ */ + def apply(coll: Repr): SeqOps[A, Iterable, C] +} + +object IsSeq { + + private val seqOpsIsSeqVal: IsSeq[Seq[Any]] = + new IsSeq[Seq[Any]] { + type A = Any + type C = Any + def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll + } + + implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = + seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] + + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsSeq[CC0[A0]] { + type A = A0 + type C = View[A] + def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + } + + implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = + new IsSeq[String] { + type A = Char + type C = String + def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] = + new SeqOps[Char, immutable.ArraySeq, String] { + def length: Int = s.length + def apply(i: Int): Char = s.charAt(i) + def toIterable: Iterable[Char] = new immutable.WrappedString(s) + protected[this] def coll: String = s + protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString + def iterableFactory: IterableFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + override def empty: String = "" + protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder + def iterator: Iterator[Char] = s.iterator + } + } + + implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = + new IsSeq[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll + } + + implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + new IsSeq[Array[A0]] { + type A = A0 + type C = Array[A0] + def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] = + new SeqOps[A, mutable.ArraySeq, Array[A]] { + def apply(i: Int): A = a(i) + def 
length: Int = a.length + def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + protected def coll: Array[A] = a + protected def fromSpecific(coll: IterableOnce[A]): Array[A] = Array.from(coll) + def iterableFactory: IterableFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + override def empty: Array[A] = Array.empty[A] + protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder + def iterator: Iterator[A] = a.iterator + } + } + + // `Range` can not be unified with the `CC0` parameter of the + // `seqOpsIsSeq` definition because it does not take a type parameter. + // Hence the need for a separate case: + implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } = + new IsSeq[C0] { + type A = Int + type C = immutable.IndexedSeq[Int] + def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll + } + +} diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala new file mode 100644 index 000000000000..223997f4e972 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala @@ -0,0 +1,62 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package generic + +/** This trait represents collection-like objects that can be reduced + * using a '+' operator. It defines variants of `-` and `--` + * as convenience methods in terms of single-element removal `-`. + * + * @tparam A the type of the elements of the $coll. + * @tparam Repr the type of the $coll itself + * @define coll collection + * @define Coll Subtractable + */ +@deprecated("Subtractable is deprecated. 
This is now implemented as part of SetOps, MapOps, etc.", "2.13.0") +trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self => + + /** The representation object of type `Repr` which contains the collection's elements + */ + protected def repr: Repr + + /** Creates a new $coll from this $coll with an element removed. + * @param elem the element to remove + * @return a new collection that contains all elements of the current $coll + * except one less occurrence of `elem`. + */ + def -(elem: A): Repr + + /** Creates a new $coll from this $coll with some elements removed. + * + * This method takes two or more elements to be removed. Another overloaded + * variant of this method handles the case where a single element is + * removed. + * @param elem1 the first element to remove. + * @param elem2 the second element to remove. + * @param elems the remaining elements to remove. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the given elements. + */ + def -(elem1: A, elem2: A, elems: A*): Repr = + this - elem1 - elem2 -- elems + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param xs the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `elems`. + */ + def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _) +} diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala new file mode 100644 index 000000000000..0c16aa04dc98 --- /dev/null +++ b/tests/pos-special/stdlib/collection/generic/package.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + + +package object generic { + @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0") + type Clearable = scala.collection.mutable.Clearable + + @deprecated("Use scala.collection.BuildFrom instead", "2.13.0") + type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C] + + @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0") + type Growable[-A] = scala.collection.mutable.Growable[A] + + @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0") + type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A] + + @deprecated("Use IsIterable instead", "2.13.0") + type IsTraversableLike[Repr] = IsIterable[Repr] + + @deprecated("Use IsIterableOnce instead", "2.13.0") + type IsTraversableOnce[Repr] = IsIterableOnce[Repr] +} diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala new file mode 100644 index 000000000000..978c63034f4a --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -0,0 +1,685 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import java.util.Arrays + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq} +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime +import scala.util.Sorting +import scala.util.hashing.MurmurHash3 + +/** + * An immutable array. + * + * Supports efficient indexed access and has a small memory footprint. + * + * @define coll immutable array + * @define Coll `ArraySeq` + */ +sealed abstract class ArraySeq[+A] + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] + with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] + with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] + with Serializable { + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + protected def elemTag: ClassTag[_] + + override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged + + /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break + * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq. + * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an + * array of a supertype or subtype of the element type. 
*/ + def unsafeArray: Array[_] + + protected def evidenceIterableFactory: ArraySeq.type = ArraySeq + protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit + + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): A + + override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = { + val dest = new Array[Any](length) + Array.copy(unsafeArray, 0, dest, 0, length) + dest(index) = elem + ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] + } + + override def map[B](f: A => B): ArraySeq[B] = { + val a = new Array[Any](size) + var i = 0 + while (i < a.length){ + a(i) = f(apply(i)) + i += 1 + } + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + + override def prepended[B >: A](elem: B): ArraySeq[B] = + ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]] + + override def appended[B >: A](elem: B): ArraySeq[B] = + ArraySeq.unsafeWrapArray(unsafeArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] + + /** Fast concatenation of two [[ArraySeq]]s. + * + * @return null if optimisation not possible. 
+ */ + private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = { + // Optimise concatenation of two ArraySeqs + // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast + if (isEmpty) + that + else if (that.isEmpty) + this + else { + val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]] + val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]] + val mismatch = thisIsObj != thatIsObj + if (mismatch) + // Combining primitives and objects: abort + null + else if (thisIsObj) { + // A and B are objects + val ax = this.unsafeArray.asInstanceOf[Array[A]] + val ay = that.unsafeArray.asInstanceOf[Array[B]] + val len = ax.length + ay.length + val a = new Array[AnyRef](len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } else { + // A is a primitive and B = A. Use this instance's protected ClassTag. + val ax = this.unsafeArray.asInstanceOf[Array[A]] + val ay = that.unsafeArray.asInstanceOf[Array[A]] + val len = ax.length + ay.length + val a = iterableEvidence.newArray(len) + System.arraycopy(ax, 0, a, 0, ax.length) + System.arraycopy(ay, 0, a, ax.length, ay.length) + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } + } + } + + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): ArraySeq[B] = { + def genericResult = { + val k = suffix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(unsafeArray) + b.addAll(suffix) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + suffix match { + case that: ArraySeq[_] => + val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]]) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): ArraySeq[B] = { + def genericResult = { + val k = 
prefix.knownSize + if (k == 0) this + else { + val b = ArrayBuilder.make[Any] + if(k >= 0) b.sizeHint(k + unsafeArray.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + unsafeArray.length) + b.addAll(unsafeArray) + ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]] + } + } + + prefix match { + case that: ArraySeq[_] => + val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this) + if (result == null) genericResult + else result + case _ => + genericResult + } + } + + override def zip[B](that: collection.IterableOnce[B]): ArraySeq[(A, B)] = + that match { + case bs: ArraySeq[B] => + ArraySeq.tabulate(length min bs.length) { i => + (apply(i), bs(i)) + } + case _ => + strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder) + } + + override def take(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).take(n)).asInstanceOf[ArraySeq[A]] + + override def takeRight(n: Int): ArraySeq[A] = + if (unsafeArray.length <= n) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + + override def drop(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).drop(n)).asInstanceOf[ArraySeq[A]] + + override def dropRight(n: Int): ArraySeq[A] = + if (n <= 0) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + + override def slice(from: Int, until: Int): ArraySeq[A] = + if (from <= 0 && unsafeArray.length <= until) + this + else + ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + + override def foldLeft[B](z: B)(f: (B, A) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast + // as the same while-loop over this instead of unsafeArray. 
+ val array = unsafeArray + var b = z + var i = 0 + while (i < array.length) { + val a = array(i).asInstanceOf[A] + b = f(b, a) + i += 1 + } + b + } + + override def foldRight[B](z: B)(f: (A, B) => B): B = { + // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast + // as the same while-loop over this instead of unsafeArray. + val array = unsafeArray + var b = z + var i = array.length + while (i > 0) { + i -= 1 + val a = array(i).asInstanceOf[A] + b = f(a, b) + } + b + } + + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).tail).asInstanceOf[ArraySeq[A]] + + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).reverse).asInstanceOf[ArraySeq[A]] + + override protected[this] def className = "ArraySeq" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(unsafeArray, 0, xs, start, copied) + } + copied + } + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] = + if(unsafeArray.length <= 1) this + else { + val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]] + } +} + +/** + * $factoryInfo + * @define coll immutable array + * @define Coll `ArraySeq` + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0)) + + def empty[A : ClassTag]: ArraySeq[A] = emptyImpl + + def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + case as: ArraySeq[A] => as + case _ => 
unsafeWrapArray(Array.from[A](it)) + } + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = + ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + + override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { + val elements = Array.ofDim[A](scala.math.max(n, 0)) + var i = 0 + while (i < n) { + ScalaRunTime.array_update(elements, i, f(i)) + i = i + 1 + } + ArraySeq.unsafeWrapArray(elements) + } + + /** + * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type + * without copying. Any changes to wrapped array will break the expected immutability. + * + * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a + * `ClassCastException` at runtime. 
+ */ + def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType) + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): T = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any): Boolean = that match { + case that: ofRef[_] => + Array.equals( + this.unsafeArray.asInstanceOf[Array[AnyRef]], + that.unsafeArray.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = { + if(unsafeArray.length <= 1) this + else { + val a = unsafeArray.clone() + Arrays.sort(a, ord.asInstanceOf[Ordering[T]]) + new ArraySeq.ofRef(a) + } + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length) + else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] { + protected def elemTag = ClassTag.Byte + def length: Int = 
unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Byte = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] = + if(length <= 1) this + else if(ord eq Ordering.Byte) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofByte(a) + } else super.sorted[B] + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Byte](elem: B): ArraySeq[B] = + elem match { + case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] { + protected def elemTag = ClassTag.Short + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Short = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(unsafeArray, 
that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] = + if(length <= 1) this + else if(ord eq Ordering.Short) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofShort(a) + } else super.sorted[B] + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Short](elem: B): ArraySeq[B] = + elem match { + case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] { + protected def elemTag = ClassTag.Char + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Char = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] = + if(length <= 1) this + else if(ord eq Ordering.Char) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofChar(a) + } else 
super.sorted[B] + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Char](elem: B): ArraySeq[B] = + elem match { + case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = + (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end) + } + + @SerialVersionUID(3L) + final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] { + protected def elemTag = ClassTag.Int + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Int = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] = + if(length <= 1) this + else if(ord eq Ordering.Int) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofInt(a) + } else super.sorted[B] + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray) + override def 
stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new IntArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Int](elem: B): ArraySeq[B] = + elem match { + case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] { + protected def elemTag = ClassTag.Long + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Long = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] = + if(length <= 1) this + else if(ord eq Ordering.Long) { + val a = unsafeArray.clone() + Arrays.sort(a) + new ArraySeq.ofLong(a) + } else super.sorted[B] + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new LongArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S 
with EfficientSplit] + override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Long](elem: B): ArraySeq[B] = + elem match { + case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] { + protected def elemTag = ClassTag.Float + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Float = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Float](elem: B): ArraySeq[B] = + elem match { + case b: Float => new 
ArraySeq.ofFloat(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] { + protected def elemTag = ClassTag.Double + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Double = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)) + else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length) + ).asInstanceOf[S with EfficientSplit] + override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Double](elem: B): ArraySeq[B] = + elem match { + case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] { + protected def elemTag = ClassTag.Boolean + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Boolean = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case 
that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray) + case _ => super.equals(that) + } + override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] = + if(length <= 1) this + else if(ord eq Ordering.Boolean) { + val a = unsafeArray.clone() + Sorting.stableSort(a) + new ArraySeq.ofBoolean(a) + } else super.sorted[B] + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit] + override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b)) + case _ => super.updated(index, elem) + } + override def appended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b)) + case _ => super.appended(elem) + } + override def prepended[B >: Boolean](elem: B): ArraySeq[B] = + elem match { + case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b)) + case _ => super.prepended(elem) + } + } + + @SerialVersionUID(3L) + final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] { + protected def elemTag = ClassTag.Unit + def length: Int = unsafeArray.length + @throws[ArrayIndexOutOfBoundsException] + def apply(i: Int): Unit = unsafeArray(i) + override def hashCode = MurmurHash3.arraySeqHash(unsafeArray) + override def equals(that: Any) = that match { + case that: ofUnit => unsafeArray.length == that.unsafeArray.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, 
unsafeArray.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala new file mode 100644 index 000000000000..9461264850a9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -0,0 +1,375 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import BitSetOps.{LogWL, updateArray} +import mutable.Builder +import scala.annotation.{implicitNotFound, nowarn} + +/** A class for immutable bitsets. + * $bitsetinfo + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]] + * section on `Immutable BitSets` for more information. 
+ * + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +sealed abstract class BitSet + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + override def unsorted: Set[Int] = this + + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems) + + def incl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) this + else { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + } + + def excl(elem: Int): BitSet = { + require(elem >= 0, "bitset element must be >= 0") + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } else this + } + + /** Update word at index `idx`; enlarge set if `idx` outside range of set. 
+ */ + protected def updateWord(idx: Int, w: Long): BitSet + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) +} + +/** + * $factoryInfo + * @define Coll `immutable.BitSet` + * @define coll immutable bitset + */ +@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly") +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + it match { + case bs: BitSet => bs + case _ => (newBuilder ++= it).result() + } + + final val empty: BitSet = new BitSet1(0L) + + def newBuilder: Builder[Int, BitSet] = + mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems)) + + private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: 
Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSetN(a) + } + } + + /** A bitset containing all the bits in an array, wrapping the existing + * array without copying. + */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else if (len == 1) new BitSet1(elems(0)) + else if (len == 2) createSmall(elems(0), elems(1)) + else new BitSetN(elems) + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet1(val elems: Long) extends BitSet { + protected[collection] def nwords = 1 + protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet1(w) + else if (idx == 1) createSmall(elems, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case _ => + val newElems = elems & ~bs.word(0) + if (newElems == 0L) this.empty else new BitSet1(newElems) + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0) + if (_elems == 0L) this.empty else new BitSet1(_elems) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSet2(val elems0: Long, val elems1: Long) extends BitSet { + protected[collection] def nwords = 2 + protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L + protected[collection] def updateWord(idx: Int, w: Long): BitSet = + if (idx == 0) new BitSet2(w, elems1) + else if (idx == 1) 
createSmall(elems0, w) + else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w)) + + + override def diff(other: collection.Set[Int]): BitSet = other match { + case bs: collection.BitSet => bs.nwords match { + case 0 => this + case 1 => + new BitSet2(elems0 & ~bs.word(0), elems1) + case _ => + val _elems0 = elems0 & ~bs.word(0) + val _elems1 = elems1 & ~bs.word(1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } else { + new BitSet1(_elems0) + } + } else { + new BitSet2(_elems0, _elems1) + } + } + case _ => super.diff(other) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0) + val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1) + + if (_elems1 == 0L) { + if (_elems0 == 0L) { + this.empty + } + else new BitSet1(_elems0) + } + else new BitSet2(_elems0, _elems1) + } + } + + @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0") + class BitSetN(val elems: Array[Long]) extends BitSet { + protected[collection] def nwords = elems.length + + protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L + + protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w)) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. Two extra concerns for optimization are described below. + * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. 
At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + * + * Tracking Changes: + * If the two sets are disjoint, then we can return `this`. Therefor, until at least one change is detected, + * we check each word for if it has changed from its corresponding word in `this`. Once a single change is + * detected, we stop checking because the cost of the new Array must be paid anyways. + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = oldFirstWord & ~bs.word(0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } else { + var i = bsnwords - 1 + var anyChanges = false + var currentWord = 0L + while (i >= 0 && !anyChanges) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + anyChanges ||= currentWord != oldWord + i 
-= 1 + } + if (anyChanges) { + val newElems = elems.clone() + newElems(i + 1) = currentWord + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + this.fromBitMaskNoCopy(newElems) + } else { + this + } + } + case _ => super.diff(that) + } + + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = nwords - 1 + var currentWord = 0L + // if there are never any changes, we can return `this` at the end + var anyChanges = false + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + i match { + case -1 => + if (anyChanges) { + if (currentWord == 0) { + this.empty + } else { + new BitSet1(currentWord) + } + } else { + this + } + case 0 => + val oldFirstWord = word(0) + val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0) + anyChanges ||= firstWord != oldFirstWord + if (anyChanges) { + new BitSet2(firstWord, currentWord) + } else { + this + } + case _ => + val minimumNonZeroIndex: Int = i + 1 + while (!anyChanges && i >= 0) { + val oldWord = word(i) + currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i) + anyChanges ||= currentWord != oldWord + i -= 1 + } + if (anyChanges) { + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + i -= 1 + } + new BitSetN(newArray) + } else { + this + } + } + } + + override def toBitMask: Array[Long] = elems.clone() + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = 
BitSet.fromBitMaskNoCopy(elems) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala new file mode 100644 index 000000000000..711332567b0f --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -0,0 +1,252 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + + +import java.lang.Integer.bitCount +import java.lang.Math.ceil +import java.lang.System.arraycopy + +private[collection] object Node { + final val HashCodeLength = 32 + + final val BitPartitionSize = 5 + + final val BitPartitionMask = (1 << BitPartitionSize) - 1 + + final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt + + final val BranchingFactor = 1 << BitPartitionSize + + final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask + + final def bitposFrom(mask: Int): Int = 1 << mask + + final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1)) + + final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos) + +} + +private[collection] abstract class Node[T <: Node[T]] { + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): T + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): Any + + def getHash(index: Int): Int + + def cachedJavaKeySetHashCode: Int + + private final def arrayIndexOutOfBounds(as: Array[_], ix:Int): ArrayIndexOutOfBoundsException = + new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1}") + + protected final def removeElement(as: Array[Int], ix: Int): Array[Int] 
= { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length - 1) + arraycopy(as, 0, result, 0, ix) + arraycopy(as, ix + 1, result, ix, as.length - ix - 1) + result + } + + protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = { + if (ix < 0) throw arrayIndexOutOfBounds(as, ix) + if (ix > as.length) throw arrayIndexOutOfBounds(as, ix) + val result = new Array[Any](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a + * depth-first pre-order traversal, which yields first all payload elements of the current + * node before traversing sub-nodes (left to right). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { + + import Node.MaxDepth + + // Note--this code is duplicated to a large extent both in + // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. + // If you change this code, check those also in case they also + // need to be modified. 
+ + protected var currentValueCursor: Int = 0 + protected var currentValueLength: Int = 0 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] var nodeCursorsAndLengths: Array[Int] = _ + private[this] var nodes: Array[T] = _ + private def initNodes(): Unit = { + if (nodeCursorsAndLengths eq null) { + nodeCursorsAndLengths = new Array[Int](MaxDepth * 2) + nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]] + } + } + + def this(rootNode: T) = { + this() + if (rootNode.hasNodes) pushNode(rootNode) + if (rootNode.hasPayload) setupPayloadNode(rootNode) + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = 0 + currentValueLength = node.payloadArity + } + + private final def pushNode(node: T): Unit = { + initNodes() + currentStackLevel = currentStackLevel + 1 + + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + nodes(currentStackLevel) = node + nodeCursorsAndLengths(cursorIndex) = 0 + nodeCursorsAndLengths(lengthIndex) = node.nodeArity + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for next node that contains payload values, + * and pushes encountered sub-nodes on a stack for depth-first traversal. 
+ */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val cursorIndex = currentStackLevel * 2 + val lengthIndex = currentStackLevel * 2 + 1 + + val nodeCursor = nodeCursorsAndLengths(cursorIndex) + val nodeLength = nodeCursorsAndLengths(lengthIndex) + + if (nodeCursor < nodeLength) { + nodeCursorsAndLengths(cursorIndex) += 1 + + val nextNode = nodes(currentStackLevel).getNode(nodeCursor) + + if (nextNode.hasNodes) { pushNode(nextNode) } + if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true } + } else { + popNode() + } + } + + return false + } + + final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode() + +} + +/** + * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base + * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left). + * + * @tparam T the trie node type we are iterating over + */ +private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] { + + import Node.MaxDepth + + protected var currentValueCursor: Int = -1 + protected var currentValueNode: T = _ + + private[this] var currentStackLevel: Int = -1 + private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1) + private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]] + + def this(rootNode: T) = { + this() + pushNode(rootNode) + searchNextValueNode() + } + + private final def setupPayloadNode(node: T): Unit = { + currentValueNode = node + currentValueCursor = node.payloadArity - 1 + } + + private final def pushNode(node: T): Unit = { + currentStackLevel = currentStackLevel + 1 + + nodeStack(currentStackLevel) = node + nodeIndex(currentStackLevel) = node.nodeArity - 1 + } + + private final def popNode(): Unit = { + currentStackLevel = currentStackLevel - 1 + } + + /** + * Searches for rightmost node that contains payload values, + * and pushes encountered sub-nodes on a stack for 
depth-first traversal. + */ + private final def searchNextValueNode(): Boolean = { + while (currentStackLevel >= 0) { + val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1 + + if (nodeCursor >= 0) { + val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor) + pushNode(nextNode) + } else { + val currNode = nodeStack(currentStackLevel) + popNode() + + if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true } + } + } + + return false + } + + final def hasNext = (currentValueCursor >= 0) || searchNextValueNode() + +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala new file mode 100644 index 000000000000..2e8378c4d810 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -0,0 +1,2423 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import java.lang.Integer.bitCount +import java.lang.System.arraycopy + +import scala.annotation.unchecked.{uncheckedVariance => uV} +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable, mutable.ReusableBuilder +import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} +import scala.runtime.AbstractFunction2 +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 + +/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. 
+ * + * @tparam K the type of the keys contained in this hash set. + * @tparam V the type of the values associated with the keys in this hash map. + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ + +final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V]) + extends AbstractMap[K, V] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with DefaultSerializable { + + def this() = this(MapNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. + releaseFence() + + override def mapFactory: MapFactory[HashMap] = HashMap + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet + + private final class HashKeySet extends ImmutableKeySet { + + private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] = + if (newHashMap eq HashMap.this) this else newHashMap.keySet + private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet + + override def incl(elem: K): Set[K] = { + val originalHash = elem.## + val improvedHash = improve(originalHash) + val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false) + newKeySetOrThis(newNode) + } + override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem) + override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1))) + override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1))) + } + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleIterator[K, V](rootNode) 
+ } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapKeyIterator[K, V](rootNode) + } + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else new MapValueIterator[K, V](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapKeyValueTupleReverseIterator[K, V](rootNode) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. + parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i))) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import collection.convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double]) + case _ => 
shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + override final def contains(key: K): Boolean = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0) + } + + override def apply(key: K): V = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.apply(key, keyUnimprovedHash, keyHash, 0) + } + + def get(key: K): Option[V] = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.get(key, keyUnimprovedHash, keyHash, 0) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val keyUnimprovedHash = key.## + val keyHash = improve(keyUnimprovedHash) + rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default) + } + + @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] = + if (newRootNode eq rootNode) this else new HashMap(newRootNode) + + def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true)) + } + + // preemptively overridden in anticipation of performance optimizations + override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] = + super.updatedWith[V1](key)(remappingFunction) + + def removed(key: K): HashMap[K, V] = { + val keyUnimprovedHash = key.## + newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) + } + + override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]): HashMap[K, V1] = that match { + case hm: HashMap[K, V1] => + if (isEmpty) hm + else { + val newNode = rootNode.concat(hm.rootNode, 0) + if (newNode eq hm.rootNode) hm + else newHashMapOrThis(rootNode.concat(hm.rootNode, 0)) + } + 
case hm: mutable.HashMap[K @unchecked, V @unchecked] => + val iter = hm.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] => + val iter = lhm.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap) + } + return new HashMap(current) + } + } + this + case _ => + class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] { + var changed = false + var shallowlyMutableNodeMap: Int = 0 + var current: BitmapIndexedMapNode[K, V1] = rootNode + def apply(kv: (K, V1)) = apply(kv._1, kv._2) + def apply(key: K, value: V1): Unit = { + val originalHash = key.## + val improved = improve(originalHash) + if (!changed) { + current = current.updated(key, value, originalHash, improved, 0, replaceValue = true) + if 
(current ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. + changed = true + shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + } + } else { + shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap) + } + } + } + that match { + case thatMap: Map[K, V1] => + if (thatMap.isEmpty) this + else { + val accum = new accum + thatMap.foreachEntry(accum) + newHashMapOrThis(accum.current) + } + case _ => + val it = that.iterator + if (it.isEmpty) this + else { + val accum = new accum + it.foreach(accum) + newHashMapOrThis(accum.current) + } + } + } + + override def tail: HashMap[K, V] = this - head._1 + + override def init: HashMap[K, V] = this - last._1 + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = reverseIterator.next() + + override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f) + + override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f) + + /** Applies a function to each key, value, and **original** hash value in this Map */ + @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + override def equals(that: Any): Boolean = + that match { + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) + case _ => super.equals(that) + } + + 
override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be + // immutable. + val hashIterator = new MapKeyValueTupleHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed) + // assert(hash == super.hashCode()) + hash + } + } + + override protected[this] def className = "HashMap" + + /** Merges this HashMap with an other HashMap by combining all key-value pairs of both maps, and delegating to a merge + * function to resolve any key collisions between the two HashMaps. + * + * @example {{{ + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(2 -> 2, 3 -> 2) + * + * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) } + * // HashMap(1 -> 1, 3 -> 2, 4 -> 3) + * + * }}} + * + * @param that the HashMap to merge this HashMap with + * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then + * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to + * `that.concat(this)` + * + * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `merge`, or + * found in `this` or `that`, it is not defined which value will be chosen. 
For example: + * + * Colliding multiple results of merging: + * {{{ + * // key `3` collides between a result of merging keys `1` and `2` + * val left = HashMap(1 -> 1, 2 -> 2) + * val right = HashMap(1 -> 1, 2 -> 2) + * + * val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 } + * // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1) + * }}} + * Colliding results of merging with other keys: + * {{{ + * // key `2` collides between a result of merging `1`, and existing key `2` + * val left = HashMap(1 -> 1, 2 -> 1) + * val right = HashMap(1 -> 2) + * + * val merged = left.merged(right)((_,_) => 2 -> 3) + * // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3) + * }}} + * + */ + def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] = + if (mergef == null) { + that ++ this + } else { + if (isEmpty) that + else if (that.isEmpty) this + else if (size == 1) { + val payload@(k, v) = rootNode.getPayload(0) + val originalHash = rootNode.getHash(0) + val improved = improve(originalHash) + + if (that.rootNode.containsKey(k, originalHash, improved, 0)) { + val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0) + val (mergedK, mergedV) = mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true)) + } + } else if (that.size == 0) { + val thatPayload@(k, v) = rootNode.getPayload(0) + val thatOriginalHash = rootNode.getHash(0) + val thatImproved = improve(thatOriginalHash) + + if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) { + val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0) + val (mergedK, mergedV) 
= mergef(payload, thatPayload) + val mergedOriginalHash = mergedK.## + val mergedImprovedHash = improve(mergedOriginalHash) + new HashMap(rootNode.updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true)) + } else { + new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true)) + } + } else { + val builder = new HashMapBuilder[K, V1] + rootNode.mergeInto(that.rootNode, builder, 0)(mergef) + builder.result() + } + } + + override def transform[W](f: (K, V) => W): HashMap[K, W] = + newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]] + + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashMap.empty + else new HashMap(newRootNode) + } + + override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + if (isEmpty) { + this + } else { + keys match { + case hashSet: HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree + // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])` + val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode) + if (newRootNode eq rootNode) this + else if (newRootNode.size <= 0) HashMap.empty + else new HashMap(newRootNode) + } + case hashSet: collection.mutable.HashSet[K] => + if (hashSet.isEmpty) { + this + } else { + val iter = hashSet.nodeIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + case lhashSet: 
collection.mutable.LinkedHashSet[K] => + if (lhashSet.isEmpty) { + this + } else { + val iter = lhashSet.entryIterator + var curr = rootNode + + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + case _ => + val iter = keys.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr.size == 0) { + return HashMap.empty + } + } + newHashMapOrThis(curr) + } + } + } + + override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two, + // based on the result of applying `p` to its elements and subnodes. + super.partition(p) + } + + override def take(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including + // those nodes in the resulting trie, until `n` total elements have been included. + super.take(n) + } + + override def takeRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ // + // In particular, `take` could be optimized to construct a new trie structure by visiting each node in reverse, and + // and including those nodes in the resulting trie, until `n` total elements have been included. + super.takeRight(n) + } + + override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and + // including those nodes in the resulting trie, until `p` returns `false` + super.takeWhile(p) + } + + override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and + // dropping those nodes in the resulting trie, until `p` returns `true` + super.dropWhile(p) + } + + override def dropRight(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse + // order, and dropping all nodes until `n` elements have been dropped + super.dropRight(n) + } + + override def drop(n: Int): HashMap[K, V] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ // + // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, and + // dropping all nodes until `n` elements have been dropped + super.drop(n) + } + + override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + // + // In particular, `scan` could be optimized to construct a new trie structure by visiting each node, and + // keeping each node and element until `p` returns false, then including the remaining nodes in the second result. + // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality + // checks. + super.span(p) + } + +} + +private[immutable] object MapNode { + + private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]] + + final val TupleLength = 2 + +} + + +private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] { + def apply(key: K, originalHash: Int, hash: Int, shift: Int): V + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 + + def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean + + /** Returns a MapNode with the passed key-value assignment added + * + * @param key the key to add to the MapNode + * @param value the value to associate with `key` + * @param originalHash the original hash of `key` + * @param hash the improved hash of `key` + * @param shift the shift of the node (distanceFromRoot * BitPartitionSize) + * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value + * argument. 
+ * if false, then the key will be inserted if not already present, however if the key is present + * then the passed value will not replace the current value. That is, if `false`, then this + * method has `update if not exists` semantics. + */ + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): MapNode[K, V] + + def hasPayload: Boolean + + def payloadArity: Int + + def getKey(index: Int): K + + def getValue(index: Int): V + + def getPayload(index: Int): (K, V) + + def size: Int + + def foreach[U](f: ((K, V)) => U): Unit + + def foreachEntry[U](f: (K, V) => U): Unit + + def foreachWithHash(f: (K, V, Int) => Unit): Unit + + def transform[W](f: (K, V) => W): MapNode[K, W] + + def copy(): MapNode[K, V] + + def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1] + + def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V] + + /** Merges this node with that node, adding each resulting tuple to `builder` + * + * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)` + * + * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree, + * as `this` is, within the left tree + */ + def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit + + /** Returns the exact (equal by reference) key, and value, associated to a given key. 
+ * If the key is not bound to a value, then an exception is thrown + */ + def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) + + /** Adds all key-value pairs to a builder */ + def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit +} + +private final class BitmapIndexedMapNode[K, +V]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] { + + releaseFence() + + import MapNode._ + import Node._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity + + private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K] + def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V] + + def getPayload(index: Int) = Tuple2( + content(TupleLength * index).asInstanceOf[K], + content(TupleLength * index + 1).asInstanceOf[V]) + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): MapNode[K, V] = + content(content.length - 1 - index).asInstanceOf[MapNode[K, V]] + + def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + if (key == getKey(index)) getValue(index) else 
throw new NoSuchElementException(s"key not found: $key") + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException(s"key not found: $key") + } + } + + def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) Some(this.getValue(index)) else None + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + None + } + } + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val mask = maskFrom(hash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val payload = getPayload(index) + if (key == payload._1) payload else throw new NoSuchElementException + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize) + } else { + throw new NoSuchElementException + } + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + if (key == key0) getValue(index) else f + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f) + } else { + f + } + } + + override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = { + val mask = 
maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift)) + (originalHashes(index) == originalHash) && key == getKey(index) + } else if ((nodeMap & bitpos) != 0) { + getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize) + } else { + false + } + } + + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + if (replaceValue) { + val value0 = this.getValue(index) + if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])) + this + else copyAndSetValue(bitpos, key, value) + } else this + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew) + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue) + + if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew) + } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value) + } + + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * 
descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated key-value belongs in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated key-value pair belongs, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param value the value to set `key` to + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. 
+ */ + def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = getKey(index) + val key0UnimprovedHash = getHash(index) + if (key0UnimprovedHash == originalHash && key0 == key) { + val value0 = this.getValue(index) + if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + content(idx + 1) = value + } + shallowlyMutableNodeMap + } else { + val value0 = this.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeHashCode = subNode.cachedJavaKeySetHashCode + + var returnMutableNodeMap = shallowlyMutableNodeMap + + val subNodeNew: MapNode[K, V1] = subNode match { + case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true) + if (result ne subNode) { + returnMutableNodeMap |= bitpos + } + result + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - 
subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode + returnMutableNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = insertElement(originalHashes, dataIx, originalHash) + this.size += 1 + this.cachedJavaKeySetHashCode += keyHash + shallowlyMutableNodeMap + } + } + + def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = { + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val key0 = this.getKey(index) + + if (key0 == key) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will a) either become the new root + * returned, or b) unwrapped and inlined during returning. 
+ */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0)) + if (index == 0) + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1))) + else + new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0))) + } else copyAndRemoveValue(bitpos, keyHash) + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize) + // assert(subNodeNew.size != 0, "Sub-node must have at least one element.") + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate (singleton or empty) result + subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]] + } else { + // inline value (move to front) + copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + copyAndSetNode(bitpos, subNode, subNodeNew) + } else this + } else this + } + + def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1))) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + val newCachedHash = keyHash0 + keyHash1 + + if (mask0 != mask1) { + // unique prefixes, payload 
fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + + if (mask0 < mask1) { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash) + } else { + new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize) + new BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + //dst(idx) = newKey + dst(idx + 1) = newValue + new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedMapNode[K, V1]( + dataMap, + nodeMap, 
+ dst, + originalHashes, + size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash) + } + + def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + // copy 'src' and remove 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash) + } + + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. 
+ * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the key currently at `bitpos` + * @param node the node to place at `bitpos` beneath `this` + */ + def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = dataMap ^ bitpos + this.nodeMap = nodeMap | bitpos + this.content = dst + this.originalHashes = dstHashes + this.size = size - 1 + node.size + this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - TupleLength + 1) + + // copy 'src' and remove 2 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + 
+ new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, + originalHashes = dstHashes, + size = size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + ) + } + + def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val key = node.getKey(0) + val value = node.getValue(0) + val src = this.content + val dst = new Array[Any](src.length - 1 + TupleLength) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 2 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = key + dst(idxNew + 1) = value + arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedMapNode[K, V1]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + override def foreach[U](f: ((K, V)) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreach(f) + j += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getKey(i), getValue(i)) + i += 1 + } 
+ + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachEntry(f) + j += 1 + } + } + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + var i = 0 + val iN = payloadArity // arity doesn't change during this operation + while (i < iN) { + f(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + var i = 0 + val iN = payloadArity + val jN = nodeArity + while (i < iN) { + builder.addOne(getKey(i), getValue(i), getHash(i)) + i += 1 + } + + var j = 0 + while (j < jN) { + getNode(j).buildTo(builder) + j += 1 + } + } + + override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = { + var newContent: Array[Any] = null + val iN = payloadArity // arity doesn't change during this operation + val jN = nodeArity // arity doesn't change during this operation + val newContentLength = content.length + var i = 0 + while (i < iN) { + val key = getKey(i) + val value = getValue(i) + val newValue = f(key, value) + if (newContent eq null) { + if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) { + newContent = content.clone() + newContent(TupleLength * i + 1) = newValue + } + } else { + newContent(TupleLength * i + 1) = newValue + } + i += 1 + } + + var j = 0 + while (j < jN) { + val node = getNode(j) + val newNode = node.transform(f) + if (newContent eq null) { + if (newNode ne node) { + newContent = content.clone() + newContent(newContentLength - j - 1) = newNode + } + } else + newContent(newContentLength - j - 1) = newNode + j += 1 + } + if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]] + else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode) + } + + override def mergeInto[V1 >: V](that: 
MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) { + that.buildTo(builder) + return + } else if (bm.size == 0) { + buildTo(builder) + return + } + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + val minIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + { + var index = minIndex + var leftIdx = 0 + var rightIdx = 0 + + while (index < maxIndex) { + val bitpos = bitposFrom(index) + + if ((bitpos & dataMap) != 0) { + val leftKey = getKey(leftIdx) + val leftValue = getValue(leftIdx) + val leftOriginalHash = getHash(leftIdx) + if ((bitpos & bm.dataMap) != 0) { + // left data and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) { + builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue))) + } else { + builder.addOne(leftKey, leftValue, leftOriginalHash) + builder.addOne(rightKey, rightValue, rightOriginalHash) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + // left data and right node + val subNode = bm.getNode(bm.nodeIndex(bitpos)) + val leftImprovedHash = improve(leftOriginalHash) + val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftData and rightNode, just build both children to builder + subNode.buildTo(builder) + builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize))) + } + } else { + // left data and nothing 
on right + builder.addOne(leftKey, leftValue, leftOriginalHash) + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + // left node and right data + val rightKey = bm.getKey(rightIdx) + val rightValue = bm.getValue(rightIdx) + val rightOriginalHash = bm.getHash(rightIdx) + val rightImprovedHash = improve(rightOriginalHash) + + val subNode = getNode(nodeIndex(bitpos)) + val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize) + if (removed eq subNode) { + // no overlap in leftNode and rightData, just build both children to builder + subNode.buildTo(builder) + builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash) + } else { + // there is collision, so special treatment for that key + removed.buildTo(builder) + builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue))) + } + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // left node and right node + getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef) + } else { + // left node and nothing on right + getNode(nodeIndex(bitpos)).buildTo(builder) + } + } else if ((bitpos & bm.dataMap) != 0) { + // nothing on left, right data + val dataIndex = bm.dataIndex(bitpos) + builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex)) + rightIdx += 1 + + } else if ((bitpos & bm.nodeMap) != 0) { + // nothing on left, right node + bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder) + } + + index += 1 + } + } + case _: HashCollisionMapNode[_, _] => + throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") + } + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedMapNode[_, _] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == 
node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { + case bm: BitmapIndexedMapNode[K, V] @unchecked => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true) + } + // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `bm` + var anyChangesMadeSoFar = false + + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var 
leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataRightOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + val leftOriginalHash = getHash(leftIdx) + if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) { + leftDataRightDataRightOverwrites |= bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) { + // nothing from `this` will make it into the result -- return early + return bm + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + + val newContent = new 
Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val rightNode = bm.getNode(rightNodeIdx) + val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift) + if (rightNode ne newNode) { + anyChangesMadeSoFar = true + } + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftKey = getKey(leftDataIdx) + val leftValue = getValue(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + + val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false) + + if (updated ne n) { + anyChangesMadeSoFar = true + } + + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + getNode(leftNodeIdx).updated( + key = bm.getKey(rightDataIdx), + value = bm.getValue(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift, + replaceValue = true + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + 
rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = originalHashes(leftDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getKey(rightDataIdx), 
bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef] + newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedMapNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = newCachedHashCode + ) + else bm + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + override def copy(): BitmapIndexedMapNode[K, V] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) * TupleLength + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy() + i += 1 + } + new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this 
else MapNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + MapNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize * TupleLength) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = 
indexFrom(dataMap, bitpos) + newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength) + newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + + + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[MapNode[K, V]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val 
oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue() + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + if (newSize == 0) { + MapNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. 
Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex * TupleLength) = getKey(oldDataIndex) + newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(TupleLength * newDataIndex) = node.getKey(0) + newContent(TupleLength * newDataIndex + 1) = node.getValue(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + } +} + +private final class HashCollisionMapNode[K, +V ]( + val originalHash: Int, + val hash: Int, + var content: Vector[(K, V @uV)] + ) extends MapNode[K, V] { + + import Node._ + + require(content.length >= 2) + + releaseFence() + + private[immutable] def indexOf(key: Any): Int = { + val iter = content.iterator + var i = 0 + while (iter.hasNext) { + if (iter.next()._1 == key) return i + i += 1 + } + -1 + } + + def size: Int = content.length + 
+ def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException) + + def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] = + if (this.hash == hash) { + val index = indexOf(key) + if (index >= 0) Some(content(index)._2) else None + } else None + + override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = { + val index = indexOf(key) + if (index >= 0) content(index) else throw new NoSuchElementException + } + + def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = { + if (this.hash == hash) { + indexOf(key) match { + case -1 => f + case other => content(other)._2 + } + } else f + } + + override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && indexOf(key) >= 0 + + def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean = + this.hash == hash && { + val index = indexOf(key) + index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) + } + + def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = { + val index = indexOf(key) + if (index >= 0) { + if (replaceValue) { + if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) { + this + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value))) + } + } else { + this + } + } else { + new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value))) + } + } + + def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = { + if (!this.containsKey(key, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => + val (k, v) = 
updatedContent(0) + new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent) + } + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): MapNode[K, V] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getKey(index: Int): K = getPayload(index)._1 + def getValue(index: Int): V = getPayload(index)._2 + + def getPayload(index: Int): (K, V) = content(index) + + override def getHash(index: Int): Int = originalHash + + def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f) + + def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)} + + override def foreachWithHash(f: (K, V, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next._1, next._2, originalHash) + } + } + + override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = { + val newContent = Vector.newBuilder[(K, W)] + val contentIter = content.iterator + // true if any values have been transformed to a different value via `f` + var anyChanges = false + while(contentIter.hasNext) { + val (k, v) = contentIter.next() + val newValue = f(k, v) + newContent.addOne((k, newValue)) + anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef]) + } + if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result()) + else this.asInstanceOf[HashCollisionMapNode[K, W]] + } + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionMapNode[_, _] => + (this eq node) || + (this.hash == node.hash) && + (this.content.length == node.content.length) && { + val iter = content.iterator + while (iter.hasNext) { + val (key, value) = iter.next() + val index = node.indexOf(key) + if 
(index < 0 || value != node.content(index)._2) { + return false + } + } + true + } + case _ => false + } + + override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match { + case hc: HashCollisionMapNode[K, V1] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[(K, V1)] = null + val iter = content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (hc.indexOf(nextPayload._1) < 0) { + if (newContent eq null) { + newContent = new VectorBuilder[(K, V1)]() + newContent.addAll(hc.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedMapNode[K, V1] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode") + } + + + override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { + case hc: HashCollisionMapNode[K, V1] => + val iter = content.iterator + val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)] + + def rightIndexOf(key: K): Int = { + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i + i += 1 + } + -1 + } + + while (iter.hasNext) { + val nextPayload = iter.next() + val index = rightIndexOf(nextPayload._1) + + if (index == -1) { + builder.addOne(nextPayload) + } else { + val rightPayload = rightArray(index).asInstanceOf[(K, V1)] + rightArray(index) = null + + builder.addOne(mergef(nextPayload, rightPayload)) + } + } + + var i = 0 + while (i < rightArray.length) { + val elem = rightArray(i) + if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)]) + i += 1 + } + case _: BitmapIndexedMapNode[K, V1] 
=> + throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode") + + } + + override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + builder.addOne(k, v, originalHash, hash) + } + } + + override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + MapNode.empty + } else if (newContentLength == 1) { + val (k, v) = newContent.head + new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash) + } else if (newContentLength == content.length) this + else new HashCollisionMapNode(originalHash, hash, newContent) + } + + override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content) + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def cachedJavaKeySetHashCode: Int = size * hash + +} + +private final class MapKeyIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val key = currentValueNode.getKey(currentValueCursor) + currentValueCursor += 1 + + key + } + +} + +private final class MapValueIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val value = currentValueNode.getValue(currentValueCursor) + currentValueCursor += 1 + + value + } +} + +private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val 
payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } +} + +private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) + extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { + private[this] var hash = 0 + private[this] var value: V = _ + override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) + def next() = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + value = currentValueNode.getValue(currentValueCursor) + currentValueCursor -= 1 + this + } +} + +/** Used in HashMap[K, V]#removeAll(HashSet[K]) */ +private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) { + /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */ + def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = { + var curr = rootMapNode + while (curr.size > 0 && hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + curr = curr.removed( + key = currentValueNode.getPayload(currentValueCursor), + keyHash = improve(originalHash), + originalHash = originalHash, + shift = 0 + ) + currentValueCursor += 1 + } + curr + } +} + +/** + * $factoryInfo + * + * @define Coll `immutable.HashMap` + * @define coll immutable champ hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + @transient + private final val EmptyMap = new HashMap(MapNode.empty) + + def empty[K, V]: HashMap[K, V] = + 
EmptyMap.asInstanceOf[HashMap[K, V]] + + def from[K, V](source: collection.IterableOnce[(K, V)]): HashMap[K, V] = + source match { + case hs: HashMap[K, V] => hs + case _ => (newBuilder[K, V] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V] +} + + +/** A Builder for a HashMap. + * $multipleResults + */ +private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] { + import MapNode._ + import Node._ + + private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashMap as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. */ + private var aliased: HashMap[K, V] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially build hashmap */ + private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (rootNode.size == 0) value + else { + val originalHash = key.## + rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value) + } + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts key-value into the bitmapIndexMapNode. 
Requires that this is a new key-value pair */ + private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V],bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + dst(idx + 1) = value + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap |= bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Upserts a key/value pair into mapNode, mutably */ + private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = { + mapNode match { + case bm: BitmapIndexedMapNode[K, V] => + val mask = maskFrom(keyHash, shift) + val bitpos = bitposFrom(mask) + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val key0 = bm.getKey(index) + val key0UnimprovedHash = bm.getHash(index) + + if (key0UnimprovedHash == originalHash && key0 == key) { + bm.content(TupleLength * index + 1) = value + } else { + val value0 = bm.getValue(index) + val key0Hash = improve(key0UnimprovedHash) + + val subNodeNew: MapNode[K, V] = + bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize) + + bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew) + } + + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHash = subNode.cachedJavaKeySetHashCode + update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + 
bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash + } else { + insertValue(bm, bitpos, key, originalHash, keyHash, value) + } + case hc: HashCollisionMapNode[K, V] => + val index = hc.indexOf(key) + if (index < 0) { + hc.content = hc.content.appended((key, value)) + } else { + hc.content = hc.content.updated(index, (key, value)) + } + } + } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private[this] def ensureUnaliased() = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable structure */ + private[this] def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashMap[K, V] = + if (rootNode.size == 0) { + HashMap.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashMap(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: (K, V)): this.type = { + ensureUnaliased() + val h = elem._1.## + val im = improve(h) + update(rootNode, elem._1, elem._2, h, im, 0) + this + } + + def addOne(key: K, value: V): this.type = { + ensureUnaliased() + val originalHash = key.## + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, improve(originalHash), 0) + this + } + def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = { + ensureUnaliased() + update(rootNode, key, value, originalHash, hash, 0) + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + ensureUnaliased() + xs match { + case hm: HashMap[K, V] => + new ChampBaseIterator[MapNode[K, V]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + mapNode = rootNode, + key = currentValueNode.getKey(currentValueCursor), + value = currentValueNode.getValue(currentValueCursor), + originalHash = 
originalHash, + keyHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + } + case hm: collection.mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = hm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case lhm: collection.mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhm.unimproveHash(next.hash) + val hash = improve(originalHash) + update(rootNode, next.key, next.value, originalHash, hash, 0) + } + case thatMap: Map[K, V] => + thatMap.foreachEntry((key, value) => addOne(key, value)) + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int = rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala new file mode 100644 index 000000000000..459fcf1682aa --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -0,0 +1,2123 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.lang.Integer.{bitCount, numberOfTrailingZeros} +import java.lang.System.arraycopy + +import scala.collection.Hashing.improve +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 + +/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. + * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. + * + * @tparam A the type of the elements contained in this hash set. + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A]) + extends AbstractSet[A] + with StrictOptimizedSetOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with DefaultSerializable { + + def this() = this(SetNode.empty) + + // This release fence is present because rootNode may have previously been mutated during construction. 
+ releaseFence() + + private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] = + if (rootNode eq newRootNode) this else new HashSet(newRootNode) + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + override def knownSize: Int = rootNode.size + + override def size: Int = rootNode.size + + override def isEmpty: Boolean = rootNode.size == 0 + + def iterator: Iterator[A] = { + if (isEmpty) Iterator.empty + else new SetIterator[A](rootNode) + } + + protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int]) + case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double]) + case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i))) + } + s.asInstanceOf[S with EfficientSplit] + } + + def contains(element: A): Boolean = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + rootNode.contains(element, elementUnimprovedHash, elementHash, 0) + } + + def incl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0) + newHashSetOrThis(newRootNode) + } + + def excl(element: A): HashSet[A] = { + val elementUnimprovedHash = element.## + val elementHash = improve(elementUnimprovedHash) + val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0) + 
newHashSetOrThis(newRootNode) + } + + override def concat(that: IterableOnce[A]): HashSet[A] = + that match { + case hs: HashSet[A] => + if (isEmpty) hs + else { + val newNode = rootNode.concat(hs.rootNode, 0) + if (newNode eq hs.rootNode) hs + else newHashSetOrThis(newNode) + } + case hs: collection.mutable.HashSet[A] => + val iter = hs.nodeIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = hs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case lhs: collection.mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + var current = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + current = current.updated(next.key, originalHash, improved, 0) + + if (current ne rootNode) { + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val next = iter.next() + val originalHash = lhs.unimproveHash(next.hash) + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + case _ => + val iter = that.iterator + var current = rootNode + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + current = current.updated(element, originalHash, improved, 0) + + if (current 
ne rootNode) { + // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that + // the first changed key ended up in a subnode beneath root, we mark that root right away as being + // shallowly mutable. + // + // since `element` has just been inserted, and certainly caused a new root node to be created, we can say with + // certainty that it either caused a new subnode to be created underneath `current`, in which case we should + // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is + // done by including its bit position in the shallowlyMutableNodeMap anyways. + var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0)) + while (iter.hasNext) { + val element = iter.next() + val originalHash = element.## + val improved = improve(originalHash) + shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap) + } + return new HashSet(current) + } + } + this + } + + override def tail: HashSet[A] = this - head + + override def init: HashSet[A] = this - last + + override def head: A = iterator.next() + + override def last: A = reverseIterator.next() + + override def foreach[U](f: A => U): Unit = rootNode.foreach(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set */ + @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f) + + /** Applies a function f to each element, and its corresponding **original** hash, in this Set + * Stops iterating the first time that f returns `false`.*/ + @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f) + + def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match { + case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0) + case _ => super.subsetOf(that) + } + + override def equals(that: 
Any): Boolean = + that match { + case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) + case _ => super.equals(that) + } + + override protected[this] def className = "HashSet" + + override def hashCode(): Int = { + val it = new SetHashIterator(rootNode) + val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed) + //assert(hash == super.hashCode()) + hash + } + + override def diff(that: collection.Set[A]): HashSet[A] = { + if (isEmpty) { + this + } else { + that match { + case hashSet: HashSet[A] => + if (hashSet.isEmpty) this else { + val newRootNode = rootNode.diff(hashSet.rootNode, 0) + if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(rootNode.diff(hashSet.rootNode, 0)) + } + case hashSet: collection.mutable.HashSet[A] => + val iter = hashSet.nodeIterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + curr = curr.removed(next.key, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = hashSet.unimproveHash(next.hash) + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next.key, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + + case other => + val thatKnownSize = other.knownSize + + if (thatKnownSize == 0) { + this + } else if (thatKnownSize <= size) { + /* this branch intentionally includes the case of thatKnownSize == -1. We know that HashSets are quite fast at look-up, so + we're likely to be the faster of the two at that. 
*/ + removedAllWithShallowMutations(other) + } else { + // TODO: Develop more sophisticated heuristic for which branch to take + filterNot(other.contains) + } + } + + } + } + + /** Immutably removes all elements of `that` from this HashSet + * + * Mutation is used internally, but only on root SetNodes which this method itself creates. + * + * That is, this method is safe to call on published sets because it does not mutate `this` + */ + private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = { + val iter = that.iterator + var curr = rootNode + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + curr = curr.removed(next, originalHash, improved, 0) + if (curr ne rootNode) { + if (curr.size == 0) { + return HashSet.empty + } + while (iter.hasNext) { + val next = iter.next() + val originalHash = next.## + val improved = improve(originalHash) + + curr.removeWithShallowMutations(next, originalHash, improved) + + if (curr.size == 0) { + return HashSet.empty + } + } + return new HashSet(curr) + } + } + this + } + + override def removedAll(that: IterableOnce[A]): HashSet[A] = that match { + case set: scala.collection.Set[A] => diff(set) + case range: Range if range.length > size => + filter { + case i: Int => !range.contains(i) + case _ => true + } + + case _ => + removedAllWithShallowMutations(that) + } + + override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.partition(p) + } + + override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.span(p) + } + + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = { + val newRootNode = rootNode.filterImpl(pred, isFlipped) + if (newRootNode eq rootNode) this + else if (newRootNode.size == 0) HashSet.empty + else new HashSet(newRootNode) + } + + override def intersect(that: collection.Set[A]): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.intersect(that) + } + + override def take(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.take(n) + } + + override def takeRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeRight(n) + } + + override def takeWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.takeWhile(p) + } + + override def drop(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.drop(n) + } + + override def dropRight(n: Int): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. 
+ super.dropRight(n) + } + + override def dropWhile(p: A => Boolean): HashSet[A] = { + // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included + // in a minor release without breaking binary compatibility. + super.dropWhile(p) + } +} + +private[immutable] object SetNode { + + private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0) + + def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]] + + final val TupleLength = 1 + +} + +private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] { + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] + + def hasNodes: Boolean + + def nodeArity: Int + + def getNode(index: Int): SetNode[A] + + def hasPayload: Boolean + + def payloadArity: Int + + def getPayload(index: Int): A + + def size: Int + + def foreach[U](f: A => U): Unit + + def subsetOf(that: SetNode[A], shift: Int): Boolean + + def copy(): SetNode[A] + + def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] + + def diff(that: SetNode[A], shift: Int): SetNode[A] + + def concat(that: SetNode[A], shift: Int): SetNode[A] + + def foreachWithHash(f: (A, Int) => Unit): Unit + + def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean +} + +private final class BitmapIndexedSetNode[A]( + var dataMap: Int, + var nodeMap: Int, + var content: Array[Any], + var originalHashes: Array[Int], + var size: Int, + var cachedJavaKeySetHashCode: Int) extends SetNode[A] { + + import Node._ + import SetNode._ + + /* + assert(checkInvariantContentIsWellTyped()) + assert(checkInvariantSubNodesAreCompacted()) + + private final def checkInvariantSubNodesAreCompacted(): Boolean = + new SetIterator[A](this).size - payloadArity >= 2 * nodeArity + + 
private final def checkInvariantContentIsWellTyped(): Boolean = { + val predicate1 = TupleLength * payloadArity + nodeArity == content.length + + val predicate2 = Range(0, TupleLength * payloadArity) + .forall(i => content(i).isInstanceOf[SetNode[_]] == false) + + val predicate3 = Range(TupleLength * payloadArity, content.length) + .forall(i => content(i).isInstanceOf[SetNode[_]] == true) + + predicate1 && predicate2 && predicate3 + } + */ + + def getPayload(index: Int): A = content(index).asInstanceOf[A] + + override def getHash(index: Int): Int = originalHashes(index) + + def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]] + + def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + return originalHashes(index) == originalHash && element == this.getPayload(index) + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize) + } + + false + } + + def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0.asInstanceOf[AnyRef] eq element.asInstanceOf[AnyRef]) { + return this + } else { + val element0UnimprovedHash = getHash(index) + val element0Hash = improve(element0UnimprovedHash) + if (originalHash == element0UnimprovedHash && element0 == element) { + return this + } else { + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + return 
copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew) + } + } + } + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNode eq subNodeNew) { + return this + } else { + return copyAndSetNode(bitpos, subNode, subNodeNew) + } + } + + copyAndInsertValue(bitpos, element, originalHash, elementHash) + } + /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately + * descendant child nodes (only one level beneath `this`) + * + * The caller should pass a bitmap of child nodes of this node, which this method may mutate. + * If this method may mutate a child node, then if the updated value is located in that child node, it will + * be shallowly mutated (its children will not be mutated). + * + * If instead this method may not mutate the child node in which the to-be-updated value is located, then + * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node. + * + * @param key the key to update + * @param originalHash key.## + * @param keyHash the improved hash + * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated + * during the call to this method + * + * @return Int which is the bitwise OR of shallowlyMutableNodeMap and any freshly created nodes, which will be + * available for mutations in subsequent calls. 
+ */ + def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = getPayload(index) + val element0UnimprovedHash = getHash(index) + if (element0UnimprovedHash == originalHash && element0 == element) { + shallowlyMutableNodeMap + } else { + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) + shallowlyMutableNodeMap | bitpos + } + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + val subNodeSize = subNode.size + val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode + + var returnNodeMap = shallowlyMutableNodeMap + + val subNodeNew: SetNode[A] = subNode match { + case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 => + subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0) + subNodeBm + case _ => + val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize) + if (subNodeNew ne subNode) { + returnNodeMap |= bitpos + } + subNodeNew + } + + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size = this.size - subNodeSize + subNodeNew.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + returnNodeMap + } else { + val dataIx = dataIndex(bitpos) + val idx = dataIx + + val src = this.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' 
+ arraycopy(src, 0, dst, 0, idx) + dst(idx) = element + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(originalHashes, dataIx, originalHash) + + this.dataMap |= bitpos + this.content = dst + this.originalHashes = dstHashes + this.size += 1 + this.cachedJavaKeySetHashCode += elementHash + shallowlyMutableNodeMap + } + } + + + def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = { + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + /* + * Create new node with remaining pair. The new node will a) either become the new root + * returned, or b) unwrapped and inlined during returning. + */ + val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0)) + if (index == 0) + return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1))) + else + return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0))) + } + else return copyAndRemoveValue(bitpos, elementHash) + } else return this + } + + if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize) + + if (subNodeNew eq subNode) return this + + // cache just in case subNodeNew is a hashCollision node, in which in which case a little arithmetic is avoided + // in Vector#length + val subNodeNewSize = subNodeNew.size + + if (subNodeNewSize == 1) { + if (this.size == subNode.size) { + // subNode is the only child (no other data or node children of `this` exist) + // escalate 
(singleton or empty) result + return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]] + } else { + // inline value (move to front) + return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew) + } + } else if (subNodeNewSize > 1) { + // modify current node (set replacement node) + return copyAndSetNode(bitpos, subNode, subNodeNew) + } + } + + this + } + /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new + * node + * + * Should only be called on root nodes, because shift is assumed to be 0 + * + * @param element the element to remove + * @param originalHash the original hash of `element` + * @param elementHash the improved hash of `element` + */ + def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = { + val mask = maskFrom(elementHash, 0) + val bitpos = bitposFrom(mask) + + if ((dataMap & bitpos) != 0) { + val index = indexFrom(dataMap, mask, bitpos) + val element0 = this.getPayload(index) + + if (element0 == element) { + if (this.payloadArity == 2 && this.nodeArity == 0) { + val newDataMap = dataMap ^ bitpos + if (index == 0) { + val newContent = Array[Any](getPayload(1)) + val newOriginalHashes = Array(originalHashes(1)) + val newCachedJavaKeySetHashCode = improve(getHash(1)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } else { + val newContent = Array[Any](getPayload(0)) + val newOriginalHashes = Array(originalHashes(0)) + val newCachedJavaKeySetHashCode = improve(getHash(0)) + this.content = newContent + this.originalHashes = newOriginalHashes + this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode + } + this.dataMap = newDataMap + this.nodeMap = 0 + this.size = 1 + this + } + else { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - TupleLength) + + arraycopy(src, 
0, dst, 0, idx) + arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength) + + val dstHashes = removeElement(originalHashes, dataIx) + + this.dataMap = this.dataMap ^ bitpos + this.content = dst + this.originalHashes = dstHashes + this.size -= 1 + this.cachedJavaKeySetHashCode -= elementHash + this + } + } else this + } else if ((nodeMap & bitpos) != 0) { + val index = indexFrom(nodeMap, mask, bitpos) + val subNode = this.getNode(index) + + val subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]] + + if (subNodeNew eq subNode) return this + + if (subNodeNew.size == 1) { + if (this.payloadArity == 0 && this.nodeArity == 1) { + this.dataMap = subNodeNew.dataMap + this.nodeMap = subNodeNew.nodeMap + this.content = subNodeNew.content + this.originalHashes = subNodeNew.originalHashes + this.size = subNodeNew.size + this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode + this + } else { + migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew) + this + } + } else { + // size must be > 1 + this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew + this.size -= 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode + this + } + } else this + } + + def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = { + // assert(key0 != key1) + + if (shift >= HashCodeLength) { + new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1)) + } else { + val mask0 = maskFrom(keyHash0, shift) + val mask1 = maskFrom(keyHash1, shift) + + if (mask0 != mask1) { + // unique prefixes, payload fits on same level + val dataMap = bitposFrom(mask0) | bitposFrom(mask1) + val newCachedHashCode = keyHash0 + keyHash1 + + if (mask0 < mask1) { + new 
BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode) + } else { + new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode) + } + } else { + // identical prefixes, payload must be disambiguated deeper in the trie + val nodeMap = bitposFrom(mask0) + val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize) + + new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode) + } + } + } + + def hasPayload: Boolean = dataMap != 0 + + def payloadArity: Int = bitCount(dataMap) + + def hasNodes: Boolean = nodeMap != 0 + + def nodeArity: Int = bitCount(nodeMap) + + def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1)) + + def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1)) + + def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = { + val idx = this.content.length - 1 - this.nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = newNode + new BitmapIndexedSetNode[A]( + dataMap = dataMap, + nodeMap = nodeMap, + content = dst, + originalHashes = originalHashes, + size = size - oldNode.size + newNode.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode + ) + } + + def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length + 1) + + // copy 'src' and insert 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + 1, src.length - idx) + val dstHashes = 
insertElement(originalHashes, dataIx, originalHash) + + new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash) + } + + def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length) + + // copy 'src' and set 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, src.length) + dst(idx) = key + + new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode) + } + + def copyAndRemoveValue(bitpos: Int, elementHash: Int) = { + val dataIx = dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = this.content + val dst = new Array[Any](src.length - 1) + + // copy 'src' and remove 1 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + arraycopy(src, idx + 1, dst, idx, src.length - idx - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash) + } + + def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld <= idxNew) + arraycopy(src, 0, dst, 0, idxOld) + arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld) + dst(idxNew) = node + arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1) + val dstHashes = removeElement(originalHashes, dataIx) + new BitmapIndexedSetNode[A]( + dataMap = dataMap ^ bitpos, + nodeMap = nodeMap | bitpos, + content = dst, originalHashes = dstHashes, + size = 
size - 1 + node.size, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode + ) + } + /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefor, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array. + * + * @param bitpos the bit position of the data to migrate to node + * @param keyHash the improved hash of the element currently at `bitpos` + * @param node the node to place at `bitpos` + */ + def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = { + val dataIx = dataIndex(bitpos) + val idxOld = TupleLength * dataIx + val idxNew = this.content.length - TupleLength - nodeIndex(bitpos) + + arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld) + content(idxNew) = node + + this.dataMap = this.dataMap ^ bitpos + this.nodeMap = this.nodeMap | bitpos + this.originalHashes = removeElement(originalHashes, dataIx) + this.size = this.size - 1 + node.size + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode + this + } + + def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val idxNew = TupleLength * dataIxNew + + val src = this.content + val dst = new Array[Any](src.length - 1 + 1) + + // copy 'src' and remove 1 element(s) at position 'idxOld' and + // insert 1 element(s) at position 'idxNew' + // assert(idxOld >= idxNew) + arraycopy(src, 0, dst, 0, idxNew) + dst(idxNew) = 
node.getPayload(0) + arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew) + arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1) + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + new BitmapIndexedSetNode[A]( + dataMap = dataMap | bitpos, + nodeMap = nodeMap ^ bitpos, + content = dst, + originalHashes = dstHashes, + size = size - oldNode.size + 1, + cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + ) + } + + /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node. + * + * Note: This method will mutate `this`, and will mutate `this.content` + * + * Mutation of `this.content` will occur as an optimization not possible in maps. Since TupleLength == 1 for sets, + * content array size does not change during inline <-> node migrations. Therefor, since we are updating in-place, + * we reuse this.content by shifting data/nodes around, rather than allocating a new array. 
+ * + * @param bitpos the bit position of the node to migrate inline + * @param oldNode the node currently stored at position `bitpos` + * @param node the node containing the single element to migrate inline + */ + def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = { + val idxOld = this.content.length - 1 - nodeIndex(bitpos) + val dataIxNew = dataIndex(bitpos) + val element = node.getPayload(0) + arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew) + content(dataIxNew) = element + val hash = node.getHash(0) + val dstHashes = insertElement(originalHashes, dataIxNew, hash) + + this.dataMap = this.dataMap | bitpos + this.nodeMap = this.nodeMap ^ bitpos + this.originalHashes = dstHashes + this.size = this.size - oldNode.size + 1 + this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode + } + + def foreach[U](f: A => U): Unit = { + val thisPayloadArity = payloadArity + var i = 0 + while (i < thisPayloadArity) { + f(getPayload(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity) { + getNode(j).foreach(f) + j += 1 + } + } + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { + case _: HashCollisionSetNode[A] => false + case node: BitmapIndexedSetNode[A] => + val thisBitmap = this.dataMap | this.nodeMap + val nodeBitmap = node.dataMap | node.nodeMap + + if ((thisBitmap | nodeBitmap) != nodeBitmap) + return false + + var bitmap = thisBitmap & nodeBitmap + var bitsToSkip = numberOfTrailingZeros(bitmap) + + var isValidSubset = true + while (isValidSubset && bitsToSkip < HashCodeLength) { + val bitpos = bitposFrom(bitsToSkip) + + isValidSubset = + if ((this.dataMap & bitpos) != 0) { + if ((node.dataMap & bitpos) != 0) { + // Data x Data + val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos)) + val payload1 = 
node.getPayload(indexFrom(node.dataMap, bitpos)) + payload0 == payload1 + } else { + // Data x Node + val thisDataIndex = indexFrom(this.dataMap, bitpos) + val payload = this.getPayload(thisDataIndex) + val subNode = that.getNode(indexFrom(node.nodeMap, bitpos)) + val elementUnimprovedHash = getHash(thisDataIndex) + val elementHash = improve(elementUnimprovedHash) + subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize) + } + } else { + // Node x Node + val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos)) + val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos)) + subNode0.subsetOf(subNode1, shift + BitPartitionSize) + } + + val newBitmap = bitmap ^ bitpos + bitmap = newBitmap + bitsToSkip = numberOfTrailingZeros(newBitmap) + } + isValidSubset + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = { + if (size == 0) this + else if (size == 1) { + if (pred(getPayload(0)) != flipped) this else SetNode.empty + } else if (nodeMap == 0) { + // Performance optimization for nodes of depth 1: + // + // this node has no "node" children, all children are inlined data elems, therefor logic is significantly simpler + // approach: + // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter + // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations + // * traverse the content array once more, placing each passing element (according to `newDatamap`) in the new content and originalHashes arrays + // + // note: + // * this optimization significantly improves performance of not only small trees, but also larger trees, since + // even non-root nodes are affected by this improvement, and large trees will consist of many nodes as + // descendants + // + val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap) + val maximumIndex: Int = Node.BranchingFactor - 
Integer.numberOfLeadingZeros(dataMap) + + var newDataMap = 0 + var newCachedHashCode = 0 + var dataIndex = 0 + + var i = minimumIndex + + while(i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } + + i += 1 + } + + if (newDataMap == 0) { + SetNode.empty + } else if (newDataMap == dataMap) { + this + } else { + val newSize = Integer.bitCount(newDataMap) + val newContent = new Array[Any](newSize) + val newOriginalHashCodes = new Array[Int](newSize) + val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap) + + var j = Integer.numberOfTrailingZeros(newDataMap) + + var newDataIndex = 0 + + while (j < newMaximumIndex) { + val bitpos = bitposFrom(j) + if ((bitpos & newDataMap) != 0) { + val oldIndex = indexFrom(dataMap, bitpos) + newContent(newDataIndex) = content(oldIndex) + newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex) + newDataIndex += 1 + } + j += 1 + } + + new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode) + } + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + + // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned, + // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in + // the parent anyways). 
This would probably involve changing the return type of filterImpl to `AnyRef` which may + // return at runtime a SetNode[A], or a tuple of (A, Int, Int) + + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val passed = pred(payload) != flipped + + if (passed) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += improve(getHash(dataIndex)) + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + val newSubNode = oldSubNode.filterImpl(pred, flipped) + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + 
nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + } + + override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) this + else if (size == 1) { + val h = getHash(0) + if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this + } else { + val allMap = dataMap | nodeMap + val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap) + val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) + + var oldDataPassThrough = 0 + + // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data + var nodeMigrateToDataTargetMap = 0 + // the queue of single-element, post-filter nodes + var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null + + // bitmap of all nodes which, when filtered, returned themselves. 
They are passed forward to the returned node + var nodesToPassThroughMap = 0 + + // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself + // These are stored for later inclusion into the final `content` array + // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular) + var mapOfNewNodes = 0 + // each bit in `mapOfNewNodes` corresponds to one element in this queue + var newNodes: mutable.Queue[SetNode[A]] = null + + var newDataMap = 0 + var newNodeMap = 0 + var newSize = 0 + var newCachedHashCode = 0 + + var dataIndex = 0 + var nodeIndex = 0 + + var i = minimumIndex + while (i < maximumIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & dataMap) != 0) { + val payload = getPayload(dataIndex) + val originalHash = getHash(dataIndex) + val hash = improve(originalHash) + + if (!bm.contains(payload, originalHash, hash, shift)) { + newDataMap |= bitpos + oldDataPassThrough |= bitpos + newSize += 1 + newCachedHashCode += hash + } + + dataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + val oldSubNode = getNode(nodeIndex) + + val newSubNode: SetNode[A] = + if ((bitpos & bm.dataMap) != 0) { + val thatDataIndex = indexFrom(bm.dataMap, bitpos) + val thatPayload = bm.getPayload(thatDataIndex) + val thatOriginalHash = bm.getHash(thatDataIndex) + val thatHash = improve(thatOriginalHash) + oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize) + } else if ((bitpos & bm.nodeMap) != 0) { + oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize) + } else { + oldSubNode + } + + newSize += newSubNode.size + newCachedHashCode += newSubNode.cachedJavaKeySetHashCode + + // if (newSubNode.size == 0) do nothing (drop it) + if (newSubNode.size > 1) { + newNodeMap |= bitpos + if (oldSubNode eq newSubNode) { + nodesToPassThroughMap |= bitpos + } else { + mapOfNewNodes |= bitpos + if (newNodes eq null) { + newNodes = mutable.Queue.empty + } + 
newNodes += newSubNode + } + } else if (newSubNode.size == 1) { + newDataMap |= bitpos + nodeMigrateToDataTargetMap |= bitpos + if (nodesToMigrateToData eq null) { + nodesToMigrateToData = mutable.Queue.empty + } + nodesToMigrateToData += newSubNode + } + + nodeIndex += 1 + } + + i += 1 + } + this.newNodeFrom( + newSize = newSize, + newDataMap = newDataMap, + newNodeMap = newNodeMap, + minimumIndex = minimumIndex, + oldDataPassThrough = oldDataPassThrough, + nodesToPassThroughMap = nodesToPassThroughMap, + nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap, + nodesToMigrateToData = nodesToMigrateToData, + mapOfNewNodes = mapOfNewNodes, + newNodes = newNodes, + newCachedHashCode = newCachedHashCode + ) + } + case _: HashCollisionSetNode[A] => + // this branch should never happen, because HashCollisionSetNodes and BitMapIndexedSetNodes do not occur at the + // same depth + throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode") + } + + /** Utility method only for use in `diff` and `filterImpl` + * + * @param newSize the size of the new SetNode + * @param newDataMap the dataMap of the new SetNode + * @param newNodeMap the nodeMap of the new SetNode + * @param minimumIndex the minimum index (in range of [0, 31]) for which there are sub-nodes or data beneath the new + * SetNode + * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new + * SetNode + * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` to the new SetNode + * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode, + * but which were nodes in `this` + * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated + * to data, in positions in the `nodeMigrateToDataTargetMap` + * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode + * @param newNodes queue in 
order of child position, of all new nodes to include in the new SetNode + * @param newCachedHashCode the cached java keyset hashcode of the new SetNode + */ + private[this] def newNodeFrom( + newSize: Int, + newDataMap: Int, + newNodeMap: Int, + minimumIndex: Int, + oldDataPassThrough: Int, + nodesToPassThroughMap: Int, + nodeMigrateToDataTargetMap: Int, + nodesToMigrateToData: mutable.Queue[SetNode[A]], + mapOfNewNodes: Int, + newNodes: mutable.Queue[SetNode[A]], + newCachedHashCode: Int): BitmapIndexedSetNode[A] = { + if (newSize == 0) { + SetNode.empty + } else if (newSize == size) { + this + } else { + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + + val newAllMap = newDataMap | newNodeMap + val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap) + + // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index` will + // not be incremented properly. Otherwise we could have started at Integer.numberOfTrailingZeroes(newAllMap) + var i = minimumIndex + + var oldDataIndex = 0 + var oldNodeIndex = 0 + + var newDataIndex = 0 + var newNodeIndex = 0 + + while (i < maxIndex) { + val bitpos = bitposFrom(i) + + if ((bitpos & oldDataPassThrough) != 0) { + newContent(newDataIndex) = getPayload(oldDataIndex) + newOriginalHashes(newDataIndex) = getHash(oldDataIndex) + newDataIndex += 1 + oldDataIndex += 1 + } else if ((bitpos & nodesToPassThroughMap) != 0) { + newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex) + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) { + // we need not check for null here. 
If nodeMigrateToDataTargetMap != 0, then nodesMigrateToData must not be null + val node = nodesToMigrateToData.dequeue() + newContent(newDataIndex) = node.getPayload(0) + newOriginalHashes(newDataIndex) = node.getHash(0) + newDataIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & mapOfNewNodes) != 0) { + // we need not check for null here. If mapOfNewNodes != 0, then newNodes must not be null + newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue() + newNodeIndex += 1 + oldNodeIndex += 1 + } else if ((bitpos & dataMap) != 0) { + oldDataIndex += 1 + } else if ((bitpos & nodeMap) != 0) { + oldNodeIndex += 1 + } + + i += 1 + } + + new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode) + } + } + + + override def equals(that: Any): Boolean = + that match { + case node: BitmapIndexedSetNode[_] => + (this eq node) || + (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && + (this.nodeMap == node.nodeMap) && + (this.dataMap == node.dataMap) && + (this.size == node.size) && + java.util.Arrays.equals(this.originalHashes, node.originalHashes) && + deepContentEquality(this.content, node.content, content.length) + case _ => false + } + + @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = { + if (a1 eq a2) + true + else { + var isEqual = true + var i = 0 + + while (isEqual && i < length) { + isEqual = a1(i) == a2(i) + i += 1 + } + + isEqual + } + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy(): BitmapIndexedSetNode[A] = { + val contentClone = content.clone() + val contentLength = contentClone.length + var i = bitCount(dataMap) + while (i < contentLength) { + contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy() + i += 1 + } + new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode) + } + 
+ override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match { + case bm: BitmapIndexedSetNode[A] => + if (size == 0) return bm + else if (bm.size == 0 || (bm eq this)) return this + else if (bm.size == 1) { + val originalHash = bm.getHash(0) + return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift) + } + + // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing + // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the + // currently-being-computed result, and `this` + var anyChangesMadeSoFar = false + + // bitmap containing `1` in any position that has any descendant in either left or right, either data or node + val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap + + // minimumIndex is inclusive -- it is the first index for which there is data or nodes + val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap)) + // maximumIndex is inclusive -- it is the last index for which there is data or nodes + // it could not be exclusive, because then upper bound in worst case (Node.BranchingFactor) would be out-of-bound + // of int bitposition representation + val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1) + + var leftNodeRightNode = 0 + var leftDataRightNode = 0 + var leftNodeRightData = 0 + var leftDataOnly = 0 + var rightDataOnly = 0 + var leftNodeOnly = 0 + var rightNodeOnly = 0 + var leftDataRightDataMigrateToNode = 0 + var leftDataRightDataLeftOverwrites = 0 + + var dataToNodeMigrationTargets = 0 + + { + var bitpos = minimumBitPos + var leftIdx = 0 + var rightIdx = 0 + var finished = false + + while (!finished) { + + if ((bitpos & dataMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + if (getHash(leftIdx) == bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) { + leftDataRightDataLeftOverwrites |= 
bitpos + } else { + leftDataRightDataMigrateToNode |= bitpos + dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift)) + } + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftDataRightNode |= bitpos + } else { + leftDataOnly |= bitpos + } + leftIdx += 1 + } else if ((bitpos & nodeMap) != 0) { + if ((bitpos & bm.dataMap) != 0) { + leftNodeRightData |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + leftNodeRightNode |= bitpos + } else { + leftNodeOnly |= bitpos + } + } else if ((bitpos & bm.dataMap) != 0) { + rightDataOnly |= bitpos + rightIdx += 1 + } else if ((bitpos & bm.nodeMap) != 0) { + rightNodeOnly |= bitpos + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + + val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites + + val newNodeMap = + leftNodeRightNode | + leftDataRightNode | + leftNodeRightData | + leftNodeOnly | + rightNodeOnly | + dataToNodeMigrationTargets + + + if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) { + // nothing from `bm` will make it into the result -- return early + return this + } + + val newDataSize = bitCount(newDataMap) + val newContentSize = newDataSize + bitCount(newNodeMap) + + val newContent = new Array[Any](newContentSize) + val newOriginalHashes = new Array[Int](newDataSize) + var newSize = 0 + var newCachedHashCode = 0 + + { + var leftDataIdx = 0 + var rightDataIdx = 0 + var leftNodeIdx = 0 + var rightNodeIdx = 0 + + val nextShift = shift + Node.BitPartitionSize + + var compressedDataIdx = 0 + var compressedNodeIdx = 0 + + var bitpos = minimumBitPos + var finished = false + + while (!finished) { + + if ((bitpos & leftNodeRightNode) != 0) { + val leftNode = getNode(leftNodeIdx) + val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift) + if (leftNode ne newNode) { + anyChangesMadeSoFar = true + } + 
newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataRightNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val n = bm.getNode(rightNodeIdx) + val leftPayload = getPayload(leftDataIdx) + val leftOriginalHash = getHash(leftDataIdx) + val leftImproved = improve(leftOriginalHash) + n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + leftDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } + else if ((bitpos & leftNodeRightData) != 0) { + val newNode = { + val rightOriginalHash = bm.getHash(rightDataIdx) + val leftNode = getNode(leftNodeIdx) + val updated = leftNode.updated( + element = bm.getPayload(rightDataIdx), + originalHash = bm.getHash(rightDataIdx), + hash = improve(rightOriginalHash), + shift = nextShift + ) + if (updated ne leftNode) { + anyChangesMadeSoFar = true + } + updated + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + + } else if ((bitpos & leftDataOnly) != 0) { + val originalHash = originalHashes(leftDataIdx) + newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + leftDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & rightDataOnly) != 0) { + anyChangesMadeSoFar = true + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + 
compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + } else if ((bitpos & leftNodeOnly) != 0) { + val newNode = getNode(leftNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & rightNodeOnly) != 0) { + anyChangesMadeSoFar = true + val newNode = bm.getNode(rightNodeIdx) + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + rightNodeIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) { + anyChangesMadeSoFar = true + val newNode = { + val leftOriginalHash = getHash(leftDataIdx) + val rightOriginalHash = bm.getHash(rightDataIdx) + + bm.mergeTwoKeyValPairs( + getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash), + bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash), + nextShift + ) + } + + newContent(newContentSize - compressedNodeIdx - 1) = newNode + compressedNodeIdx += 1 + leftDataIdx += 1 + rightDataIdx += 1 + newSize += newNode.size + newCachedHashCode += newNode.cachedJavaKeySetHashCode + } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) { + val originalHash = bm.originalHashes(rightDataIdx) + newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef] + newOriginalHashes(compressedDataIdx) = originalHash + + compressedDataIdx += 1 + rightDataIdx += 1 + newSize += 1 + newCachedHashCode += improve(originalHash) + leftDataIdx += 1 + } + + if (bitpos == maximumBitPos) { + finished = true + } else { + bitpos = bitpos << 1 + } + } + } + + if (anyChangesMadeSoFar) + new BitmapIndexedSetNode( + dataMap = newDataMap, + nodeMap = newNodeMap, + content = newContent, + originalHashes = newOriginalHashes, + size = newSize, + cachedJavaKeySetHashCode = 
newCachedHashCode + ) + else this + + case _ => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iN = payloadArity // arity doesn't change during this operation + var i = 0 + while (i < iN) { + f(getPayload(i), getHash(i)) + i += 1 + } + + val jN = nodeArity // arity doesn't change during this operation + var j = 0 + while (j < jN) { + getNode(j).foreachWithHash(f) + j += 1 + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + val thisPayloadArity = payloadArity + var pass = true + var i = 0 + while (i < thisPayloadArity && pass) { + pass &&= f(getPayload(i), getHash(i)) + i += 1 + } + + val thisNodeArity = nodeArity + var j = 0 + while (j < thisNodeArity && pass) { + pass &&= getNode(j).foreachWithHashWhile(f) + j += 1 + } + pass + } +} + +private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] { + + import Node._ + + require(content.length >= 2) + + def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean = + this.hash == hash && content.contains(element) + + def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (this.contains(element, originalHash, hash, shift)) { + this + } else { + new HashCollisionSetNode[A](originalHash, hash, content.appended(element)) + } + + /** + * Remove an element from the hash collision node. + * + * When after deletion only one element remains, we return a bit-mapped indexed node with a + * singleton element and a hash-prefix for trie level 0. This node will be then a) either become + * the new root, or b) unwrapped and inlined deeper in the trie. 
+ */ + def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] = + if (!this.contains(element, originalHash, hash, shift)) { + this + } else { + val updatedContent = content.filterNot(element0 => element0 == element) + // assert(updatedContent.size == content.size - 1) + + updatedContent.size match { + case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash) + case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent) + } + } + + def hasNodes: Boolean = false + + def nodeArity: Int = 0 + + def getNode(index: Int): SetNode[A] = + throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.") + + def hasPayload: Boolean = true + + def payloadArity: Int = content.length + + def getPayload(index: Int): A = content(index) + + override def getHash(index: Int): Int = originalHash + + def size: Int = content.length + + def foreach[U](f: A => U): Unit = { + val iter = content.iterator + while (iter.hasNext) { + f(iter.next()) + } + } + + + override def cachedJavaKeySetHashCode: Int = size * hash + + def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match { + case node: HashCollisionSetNode[A] => + this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains) + case _ => + false + } + + override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = { + val newContent = content.filterImpl(pred, flipped) + val newContentLength = newContent.length + if (newContentLength == 0) { + SetNode.empty + } else if (newContentLength == 1) { + new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash) + } else if (newContent.length == content.length) this + else new HashCollisionSetNode(originalHash, hash, newContent) + } + + override def diff(that: SetNode[A], shift: Int): SetNode[A] = + filterImpl(that.contains(_, originalHash, 
hash, shift), true) + + override def equals(that: Any): Boolean = + that match { + case node: HashCollisionSetNode[_] => + (this eq node) || + (this.hash == node.hash) && + (this.content.size == node.content.size) && + this.content.forall(node.content.contains) + case _ => false + } + + override def hashCode(): Int = + throw new UnsupportedOperationException("Trie nodes do not support hashing.") + + override def copy() = new HashCollisionSetNode[A](originalHash, hash, content) + + override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match { + case hc: HashCollisionSetNode[A] => + if (hc eq this) { + this + } else { + var newContent: VectorBuilder[A] = null + val iter = hc.content.iterator + while (iter.hasNext) { + val nextPayload = iter.next() + if (!content.contains(nextPayload)) { + if (newContent eq null) { + newContent = new VectorBuilder() + newContent.addAll(this.content) + } + newContent.addOne(nextPayload) + } + } + if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result()) + } + case _: BitmapIndexedSetNode[A] => + // should never happen -- hash collisions are never at the same level as bitmapIndexedSetNodes + throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode") + } + + override def foreachWithHash(f: (A, Int) => Unit): Unit = { + val iter = content.iterator + while (iter.hasNext) { + val next = iter.next() + f(next.asInstanceOf[A], originalHash) + } + } + + override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = { + var stillGoing = true + val iter = content.iterator + while (iter.hasNext && stillGoing) { + val next = iter.next() + stillGoing &&= f(next.asInstanceOf[A], originalHash) + } + stillGoing + } +} + +private final class SetIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next() = { + if (!hasNext) + throw new NoSuchElementException + + val payload = 
currentValueNode.getPayload(currentValueCursor) + currentValueCursor += 1 + + payload + } + +} + +private final class SetReverseIterator[A](rootNode: SetNode[A]) + extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] { + + def next(): A = { + if (!hasNext) + throw new NoSuchElementException + + val payload = currentValueNode.getPayload(currentValueCursor) + currentValueCursor -= 1 + + payload + } + +} + +private final class SetHashIterator[A](rootNode: SetNode[A]) + extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] { + private[this] var hash = 0 + override def hashCode(): Int = hash + + def next(): AnyRef = { + if (!hasNext) + throw new NoSuchElementException + + hash = currentValueNode.getHash(currentValueCursor) + currentValueCursor += 1 + this + } + +} + + +/** + * $factoryInfo + * + * @define Coll `immutable.HashSet` + * @define coll immutable champ hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + @transient + private final val EmptySet = new HashSet(SetNode.empty) + + def empty[A]: HashSet[A] = + EmptySet.asInstanceOf[HashSet[A]] + + def from[A](source: collection.IterableOnce[A]): HashSet[A] = + source match { + case hs: HashSet[A] => hs + case _ if source.knownSize == 0 => empty[A] + case _ => (newBuilder[A] ++= source).result() + } + + /** Create a new Builder which can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. + */ + def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder +} + +/** Builder for HashSet. 
+ * $multipleResults + */ +private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] { + import Node._ + import SetNode._ + + private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0) + + /** The last given out HashSet as a return value of `result()`, if any, otherwise null. + * Indicates that on next add, the elements should be copied to an identical structure, before continuing + * mutations. */ + private var aliased: HashSet[A] = _ + + private def isAliased: Boolean = aliased != null + + /** The root node of the partially build hashmap */ + private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode + + /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ + private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { + if (ix < 0) throw new ArrayIndexOutOfBoundsException + if (ix > as.length) throw new ArrayIndexOutOfBoundsException + val result = new Array[Int](as.length + 1) + arraycopy(as, 0, result, 0, ix) + result(ix) = elem + arraycopy(as, ix, result, ix + 1, as.length - ix) + result + } + + /** Inserts key-value into the bitmapIndexMapNode. 
Requires that this is a new key-value pair */ + private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + + val src = bm.content + val dst = new Array[Any](src.length + TupleLength) + + // copy 'src' and insert 2 element(s) at position 'idx' + arraycopy(src, 0, dst, 0, idx) + dst(idx) = key + arraycopy(src, idx, dst, idx + TupleLength, src.length - idx) + + val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash) + + bm.dataMap = bm.dataMap | bitpos + bm.content = dst + bm.originalHashes = dstHashes + bm.size += 1 + bm.cachedJavaKeySetHashCode += keyHash + } + + /** Mutates `bm` to replace inline data at bit position `bitpos` with updated key/value */ + private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = { + val dataIx = bm.dataIndex(bitpos) + val idx = TupleLength * dataIx + bm.content(idx) = elem + } + + def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit = + setNode match { + case bm: BitmapIndexedSetNode[A] => + val mask = maskFrom(elementHash, shift) + val bitpos = bitposFrom(mask) + + if ((bm.dataMap & bitpos) != 0) { + val index = indexFrom(bm.dataMap, mask, bitpos) + val element0 = bm.getPayload(index) + val element0UnimprovedHash = bm.getHash(index) + + if (element0UnimprovedHash == originalHash && element0 == element) { + setValue(bm, bitpos, element0) + } else { + val element0Hash = improve(element0UnimprovedHash) + val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize) + bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew) + } + } else if ((bm.nodeMap & bitpos) != 0) { + val index = indexFrom(bm.nodeMap, mask, bitpos) + val subNode = bm.getNode(index) + val beforeSize = subNode.size + val beforeHashCode = 
subNode.cachedJavaKeySetHashCode + update(subNode, element, originalHash, elementHash, shift + BitPartitionSize) + bm.size += subNode.size - beforeSize + bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode + } else { + insertValue(bm, bitpos, element, originalHash, elementHash) + } + case hc: HashCollisionSetNode[A] => + val index = hc.content.indexOf(element) + if (index < 0) { + hc.content = hc.content.appended(element) + } else { + hc.content = hc.content.updated(index, element) + } + } + + /** If currently referencing aliased structure, copy elements to new mutable structure */ + private def ensureUnaliased():Unit = { + if (isAliased) copyElems() + aliased = null + } + + /** Copy elements to new mutable structure */ + private def copyElems(): Unit = { + rootNode = rootNode.copy() + } + + override def result(): HashSet[A] = + if (rootNode.size == 0) { + HashSet.empty + } else if (aliased != null) { + aliased + } else { + aliased = new HashSet(rootNode) + releaseFence() + aliased + } + + override def addOne(elem: A): this.type = { + ensureUnaliased() + val h = elem.## + val im = improve(h) + update(rootNode, elem, h, im, 0) + this + } + + override def addAll(xs: IterableOnce[A]) = { + ensureUnaliased() + xs match { + case hm: HashSet[A] => + new ChampBaseIterator[SetNode[A]](hm.rootNode) { + while(hasNext) { + val originalHash = currentValueNode.getHash(currentValueCursor) + update( + setNode = rootNode, + element = currentValueNode.getPayload(currentValueCursor), + originalHash = originalHash, + elementHash = improve(originalHash), + shift = 0 + ) + currentValueCursor += 1 + } + } + case other => + val it = other.iterator + while(it.hasNext) addOne(it.next()) + } + + this + } + + override def clear(): Unit = { + aliased = null + if (rootNode.size > 0) { + // if rootNode is empty, we will not have given it away anyways, we instead give out the reused Set.empty + rootNode = newEmptyRootNode + } + } + + private[collection] def size: Int 
= rootNode.size + + override def knownSize: Int = rootNode.size +} diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala new file mode 100644 index 000000000000..240821b11460 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -0,0 +1,502 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions + +/** Utility class for integer maps. + */ +private[immutable] object IntMapUtils extends BitOperations.Int { + def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) IntMap.Bin(p, m, t1, t2) + else IntMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match { + case (left, IntMap.Nil) => left + case (IntMap.Nil, right) => right + case (left, right) => IntMap.Bin(prefix, mask, left, right) + } +} + +import IntMapUtils._ + +/** A companion object for integer maps. 
+ * + * @define Coll `IntMap` + */ +object IntMap { + def empty[T] : IntMap[T] = IntMap.Nil + + def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value) + + def apply[T](elems: (Int, T)*): IntMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Int, V)]): IntMap[V] = + newBuilder[V].addAll(coll).result() + + private[immutable] case object Nil extends IntMap[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. + override def equals(that : Any) = that match { + case _: this.type => true + case _: IntMap[_] => false // The only empty IntMaps are eq Nil + case _ => super.equals(that) + } + } + + private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] + else IntMap.Tip(key, s) + } + + private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { + def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] + else IntMap.Bin[S](prefix, mask, left, right) + } + } + + def newBuilder[V]: Builder[(Int, V), IntMap[V]] = + new ImmutableBuilder[(Int, V), IntMap[V]](empty) { + def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this } + } + + implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Int, AnyRef)]): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = 
IntMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]) = IntMap.from(it) + def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this) + implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this) +} + +// Iterator over a non-empty IntMap. +private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] { + + // Basically this uses a simple stack to emulate conversion over the tree. However + // because we know that Ints are at least 32 bits we can have at most 32 IntMap.Bins and + // one IntMap.Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33 and + var index = 0 + var buffer = new Array[AnyRef](33) + + def pop = { + index -= 1 + buffer(index).asInstanceOf[IntMap[V]] + } + + def push(x: IntMap[V]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + push(it) + + /** + * What value do we assign to a tip? + */ + def valueOf(tip: IntMap.Tip[V]): T + + def hasNext = index != 0 + @tailrec + final def next(): T = + pop match { + case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => { + push(right) + valueOf(t) + } + case IntMap.Bin(_, _, left, right) => { + push(right) + push(left) + next() + } + case t@IntMap.Tip(_, _) => valueOf(t) + // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap + // and don't return an IntMapIterator for IntMap.Nil. 
+ case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) { + def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.value +} + +private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) { + def valueOf(tip: IntMap.Tip[V]) = tip.key +} + +import IntMap._ + +/** Specialised immutable map structure for integer keys, based on + * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. + * + * '''Note:''' This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with integer keys. + * + * @define Coll `immutable.IntMap` + * @define coll immutable integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class IntMap[+T] extends AbstractMap[Int, T] + with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]): IntMap[T] = + intMapFrom[T](coll) + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]): IntMap[V2] = { + val b = IntMap.newBuilder[V2] + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Int, T), IntMap[T]](empty) { + def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this } + } + + override def empty: IntMap[T] = IntMap.Nil + + override def toList = { + val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator 
over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of integer keys and corresponding values. + */ + def iterator: Iterator[(Int, T)] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. + */ + override final def foreach[U](f: ((Int, T)) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case IntMap.Tip(key, value) => f((key, value)) + case IntMap.Nil => + } + + override def foreachEntry[U](f: (IntMapUtils.Int, T) => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case IntMap.Tip(key, value) => f(key, value) + case IntMap.Nil => + } + + override def keysIterator: Iterator[Int] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as `keys.foreach(f)`, but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Int => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case IntMap.Tip(key, _) => f(key) + case IntMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case IntMap.Nil => Iterator.empty + case _ => new IntMapValueIterator(this) + } + + /** + * Loop over the values of the map. The same as `values.foreach(f)`, but may + * be more efficient. 
+ * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case IntMap.Tip(_, value) => f(value) + case IntMap.Nil => + } + + override protected[this] def className = "IntMap" + + override def isEmpty = this eq IntMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case IntMap.Tip(key, value) => + if (f((key, value))) this + else IntMap.Nil + case IntMap.Nil => IntMap.Nil + } + + override def transform[S](f: (Int, T) => S): IntMap[S] = this match { + case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@IntMap.Tip(key, value) => t.withValue(f(key, value)) + case IntMap.Nil => IntMap.Nil + } + + final override def size: Int = this match { + case IntMap.Nil => 0 + case IntMap.Tip(_, _) => 1 + case IntMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Int): Option[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None + case IntMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Int, default: => S): S = this match { + case IntMap.Nil => default + case IntMap.Tip(key2, value) => if (key == key2) value else default + case IntMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Int): T = this match { + case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) 
left(key) else right(key) + case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case IntMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Int, value: S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right) + else IntMap.Bin(prefix, mask, left, right.updated(key, value)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, value) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) + } + + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) + + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) + + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = + super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = + strictOptimizedCollect(IntMap.newBuilder[V2], pf) + + /** + * Updates the map, using the provided function to resolve conflicts if the key is already present. + * + * Equivalent to: + * {{{ + * this.get(key) match { + * case None => this.update(key, value) + * case Some(oldvalue) => this.update(key, f(oldvalue, value) + * } + * }}} + * + * @tparam S The supertype of values in this `LongMap`. + * @param key The key to update + * @param value The value to use if there is no conflict + * @param f The function used to resolve conflicts. + * @return The updated map. 
+ */ + def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case IntMap.Tip(key2, value2) => + if (key == key2) IntMap.Tip(key, f(value2, value)) + else join(key, IntMap.Tip(key, value), key2, this) + case IntMap.Nil => IntMap.Tip(key, value) + } + + def removed (key: Int): IntMap[T] = this match { + case IntMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) + case IntMap.Tip(key2, _) => + if (key == key2) IntMap.Nil + else this + case IntMap.Nil => IntMap.Nil + } + + /** + * A combined transform and filter function. Returns an `IntMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ + def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match { + case IntMap.Bin(prefix, mask, left, right) => + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]] + else bin(prefix, mask, newleft, newright) + case IntMap.Tip(key, value) => f(key, value) match { + case None => + IntMap.Nil + case Some(value2) => + //hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]] + else IntMap.Tip(key, value2) + } + case IntMap.Nil => + IntMap.Nil + } + + /** + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
+ */ + def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{ + case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) + else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f)) + } else if (shorter(m2, m1)){ + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) + else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f)) + } + else { + if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join(p1, this, p2, that) + } + case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x)) + case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (IntMap.Nil, x) => x + case (x, IntMap.Nil) => x + } + + /** + * Forms the intersection of these two maps with a combining function. The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. 
+ */ + def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match { + case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) IntMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) IntMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (IntMap.Tip(key, value), that) => that.get(key) match { + case None => IntMap.Nil + case Some(value2) => IntMap.Tip(key, f(key, value, value2)) + } + case (_, IntMap.Tip(key, value)) => this.get(key) match { + case None => IntMap.Nil + case Some(value2) => IntMap.Tip(key, f(key, value2, value)) + } + case (_, _) => IntMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings + * as this but only for keys which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. + */ + def intersection[R](that: IntMap[R]): IntMap[T] = + this.intersectionWith(that, (key: Int, value: T, value2: R) => value) + + def ++[S >: T](that: IntMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + /** + * The entry with the lowest key value considered in unsigned order. + */ + @tailrec + final def firstKey: Int = this match { + case Bin(_, _, l, r) => l.firstKey + case Tip(k, v) => k + case IntMap.Nil => throw new IllegalStateException("Empty set") + } + + /** + * The entry with the highest key value considered in unsigned order. 
+ */ + @tailrec + final def lastKey: Int = this match { + case Bin(_, _, l, r) => r.lastKey + case Tip(k, v) => k + case IntMap.Nil => throw new IllegalStateException("Empty set") + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index 44f13d0f2895..d4199ab3ab14 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -13,7 +13,6 @@ package scala.collection.immutable import scala.collection.{IterableFactory, IterableFactoryDefaults} -import language.experimental.captureChecking /** A trait for collections that are guaranteed immutable. * @@ -25,7 +24,6 @@ import language.experimental.captureChecking trait Iterable[+A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyList.scala new file mode 100644 index 000000000000..8b7ad26dc5ae --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LazyList.scala @@ -0,0 +1,1381 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} +import scala.language.implicitConversions +import scala.runtime.Statics + +/** This class implements an immutable linked list. We call it "lazy" + * because it computes its elements only when they are needed. + * + * Elements are memoized; that is, the value of each element is computed at most once. + * + * Elements are computed in-order and are never skipped. In other words, + * accessing the tail causes the head to be computed first. + * + * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * don't know yet whether the list is empty or not. If you learn that it is non-empty, + * then you also know that the head has been computed. But the tail is itself + * a `LazyList`, whose emptiness-or-not might remain undetermined. + * + * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, + * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. + * + * Here is an example: + * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } + * fibs.take(5).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * }}} + * + * To illustrate, let's add some output to the definition `fibs`, so we + * see what's going on. 
+ * + * {{{ + * import scala.math.BigInt + * object Main extends App { + * val fibs: LazyList[BigInt] = + * BigInt(0) #:: BigInt(1) #:: + * fibs.zip(fibs.tail).map{ n => + * println(s"Adding \${n._1} and \${n._2}") + * n._1 + n._2 + * } + * fibs.take(5).foreach(println) + * fibs.take(6).foreach(println) + * } + * + * // prints + * // + * // 0 + * // 1 + * // Adding 0 and 1 + * // 1 + * // Adding 1 and 1 + * // 2 + * // Adding 1 and 2 + * // 3 + * + * // And then prints + * // + * // 0 + * // 1 + * // 1 + * // 2 + * // 3 + * // Adding 2 and 3 + * // 5 + * }}} + * + * Note that the definition of `fibs` uses `val` not `def`. The memoization of the + * `LazyList` requires us to have somewhere to store the information and a `val` + * allows us to do that. + * + * Further remarks about the semantics of `LazyList`: + * + * - Though the `LazyList` changes as it is accessed, this does not + * contradict its immutability. Once the values are memoized they do + * not change. Values that have yet to be memoized still "exist", they + * simply haven't been computed yet. + * + * - One must be cautious of memoization; it can eat up memory if you're not + * careful. That's because memoization of the `LazyList` creates a structure much like + * [[scala.collection.immutable.List]]. As long as something is holding on to + * the head, the head holds on to the tail, and so on recursively. + * If, on the other hand, there is nothing holding on to the head (e.g. if we used + * `def` to define the `LazyList`) then once it is no longer being used directly, + * it disappears. + * + * - Note that some operations, including [[drop]], [[dropWhile]], + * [[flatMap]] or [[collect]] may process a large number of intermediate + * elements before returning. + * + * Here's another example. Let's start with the natural numbers and iterate + * over them. 
+ * + * {{{ + * // We'll start with a silly iteration + * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = { + * // Stop after 200,000 + * if (i < 200001) { + * if (i % 50000 == 0) println(s + i) + * loop(s, iter.next(), iter) + * } + * } + * + * // Our first LazyList definition will be a val definition + * val lazylist1: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * + * // Because lazylist1 is a val, everything that the iterator produces is held + * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * val it1 = lazylist1.iterator + * loop("Iterator1: ", it1.next(), it1) + * + * // We can redefine this LazyList such that all we have is the Iterator left + * // and allow the LazyList to be garbage collected as required. Using a def + * // to provide the LazyList ensures that no val is holding onto the head as + * // is the case with lazylist1 + * def lazylist2: LazyList[Int] = { + * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * loop(0) + * } + * val it2 = lazylist2.iterator + * loop("Iterator2: ", it2.next(), it2) + * + * // And, of course, we don't actually need a LazyList at all for such a simple + * // problem. There's no reason to use a LazyList if you don't actually need + * // one. + * val it3 = new Iterator[Int] { + * var i = -1 + * def hasNext = true + * def next(): Int = { i += 1; i } + * } + * loop("Iterator3: ", it3.next(), it3) + * }}} + * + * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. + * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. 
+ * If we defined `fibs` such that only `0` were concretely known, then the act + * of determining `tail` would require the evaluation of `tail`, so the + * computation would be unable to progress, as in this code: + * {{{ + * // The first time we try to access the tail we're going to need more + * // information which will require us to recurse, which will require us to + * // recurse, which... + * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * }}} + * + * The definition of `fibs` above creates a larger number of objects than + * necessary depending on how you might want to implement it. The following + * implementation provides a more "cost effective" implementation due to the + * fact that it has a more direct route to the numbers themselves: + * + * {{{ + * lazy val fib: LazyList[Int] = { + * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * loop(1, 1) + * } + * }}} + * + * The head, the tail and whether the list is empty or not can be initially unknown. + * Once any of those are evaluated, they are all known, though if the tail is + * built with `#::` or `#:::`, it's content still isn't evaluated. Instead, evaluating + * the tails content is deferred until the tails empty status, head or tail is + * evaluated. + * + * Delaying the evaluation of whether a LazyList is empty or not until it's needed + * allows LazyList to not eagerly evaluate any elements on a call to `filter`. + * + * Only when it's further evaluated (which may be never!) any of the elements gets + * forced. 
+ * + * for example: + * + * {{{ + * def tailWithSideEffect: LazyList[Nothing] = { + * println("getting empty LazyList") + * LazyList.empty + * } + * + * val emptyTail = tailWithSideEffect // prints "getting empty LazyList" + * + * val suspended = 1 #:: tailWithSideEffect // doesn't print anything + * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed + * val filtered = tail.filter(_ => false) // still nothing is printed + * filtered.isEmpty // prints "getting empty LazyList" + * }}} + * + * @tparam A the type of the elements contained in this lazy list. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] + * section on `LazyLists` for more information. + * @define Coll `LazyList` + * @define coll lazy list + * @define orderDependent + * @define orderDependentFold + * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`, + * `appendedAll`, `lazyAppendedAll`) without forcing any of the + * intermediate resulting lazy lists may overflow the stack when + * the final result is forced. + * @define preservesLaziness This method preserves laziness; elements are only evaluated + * individually as needed. + * @define initiallyLazy This method does not evaluate anything until an operation is performed + * on the result (e.g. calling `head` or `tail`, or checking if it is empty). + * @define evaluatesAllElements This method evaluates all elements of the collection. 
+ */ +@SerialVersionUID(3L) +final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, LazyList, LazyList[A]] + with IterableFactoryDefaults[A, LazyList] + with Serializable { + import LazyList._ + + @volatile private[this] var stateEvaluated: Boolean = false + @inline private def stateDefined: Boolean = stateEvaluated + private[this] var midEvaluation = false + + private lazy val state: State[A] = { + // if it's already mid-evaluation, we're stuck in an infinite + // self-referential loop (also it's empty) + if (midEvaluation) { + throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + } + midEvaluation = true + val res = try lazyState() finally midEvaluation = false + // if we set it to `true` before evaluating, we may infinite loop + // if something expects `state` to already be evaluated + stateEvaluated = true + lazyState = null // allow GC + res + } + + override def iterableFactory: SeqFactory[LazyList] = LazyList + + override def isEmpty: Boolean = state eq State.Empty + + /** @inheritdoc + * + * $preservesLaziness + */ + override def knownSize: Int = if (knownIsEmpty) 0 else -1 + + override def head: A = state.head + + override def tail: LazyList[A] = state.tail + + @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) + @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) + + /** Evaluates all undefined elements of the lazy list. + * + * This method detects cycles in lazy lists, and terminates after all + * elements of the cycle are evaluated. For example: + * + * {{{ + * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * ring.force + * ring.toString + * + * // prints + * // + * // LazyList(1, 2, 3, ...) + * }}} + * + * This method will *not* terminate for non-cyclic infinite-sized collections. 
+ * + * @return this + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: LazyList[A] = this + if (!these.isEmpty) { + these = these.tail + } + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + /** @inheritdoc + * + * The iterator returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + override def iterator: Iterator[A] = + if (knownIsEmpty) Iterator.empty + else new LazyIterator(this) + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying LazyList as elements + * are consumed. + * @note This function will force the realization of the entire LazyList + * unless the `f` throws an exception. + */ + @tailrec + override def foreach[U](f: A => U): Unit = { + if (!isEmpty) { + f(head) + tail.foreach(f) + } + } + + /** LazyList specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `op`. 
+ */ + @tailrec + override def foldLeft[B](z: B)(op: (B, A) => B): B = + if (isEmpty) z + else tail.foldLeft(op(z, head))(op) + + // State.Empty doesn't use the SerializationProxy + protected[this] def writeReplace(): AnyRef = + if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + + override protected[this] def className = "LazyList" + + /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. + * + * $preservesLaziness + * + * $appendStackSafety + * + * @param suffix The collection that gets appended to this lazy list + * @return The lazy list containing elements of this lazy list and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + newLL { + if (isEmpty) suffix match { + case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case coll if coll.knownSize == 0 => State.Empty + case coll => stateFromIterator(coll.iterator) + } + else sCons(head, tail lazyAppendedAll suffix) + } + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(suffix) + else lazyAppendedAll(suffix) + + /** @inheritdoc + * + * $preservesLaziness + * + * $appendStackSafety + */ + override def appended[B >: A](elem: B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + else lazyAppendedAll(Iterator.single(elem)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = + if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + else newLL(scanLeftState(z)(op)) + + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + sCons( + z, + newLL { + if (isEmpty) State.Empty + else tail.scanLeftState(op(z, head))(op) + } + ) + + /** LazyList specialization of reduceLeft which allows GC to collect + * along the way. 
+ * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `LazyList`. + * @return The accumulated value from successive applications of `f`. + */ + override def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: LazyList[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + val (left, right) = map(f).partition(_.isLeft) + (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filter(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = false) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def filterNot(pred: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.filterImpl(this, pred, isFlipped = true) + + /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. + * + * This method is not particularly useful for a lazy list, as [[filter]] already preserves + * laziness. + * + * The `collection.WithFilter` returned by this method preserves laziness; elements are + * only evaluated individually as needed. 
+ */ + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = + new LazyList.WithFilter(coll, p) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.from(prefix) + else if (prefix.knownSize == 0) this + else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def map[B](f: A => B): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else (mapImpl(f): @inline) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def tapEach[U](f: A => U): LazyList[A] = map { a => f(a); a } + + private def mapImpl[B](f: A => B): LazyList[B] = + newLL { + if (isEmpty) State.Empty + else sCons(f(head), tail.mapImpl(f)) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.collectImpl(this, pf) + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element for which the partial function is defined. + */ + @tailrec + override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if (isEmpty) None + else { + val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) + else Some(res) + } + + /** @inheritdoc + * + * This method does not evaluate any elements further than + * the first element matching the predicate. 
+ */ + @tailrec + override def find(p: A => Boolean): Option[A] = + if (isEmpty) None + else { + val elem = head + if (p(elem)) Some(elem) + else tail.find(p) + } + + /** @inheritdoc + * + * $preservesLaziness + */ + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = + if (knownIsEmpty) LazyList.empty + else LazyList.flatMapImpl(this, f) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = + if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty + else newLL(zipState(that.iterator)) + + private def zipState[B](it: Iterator[B]): State[(A, B)] = + if (this.isEmpty || !it.hasNext) State.Empty + else sCons((head, it.next()), newLL { tail zipState it }) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + if (this.knownIsEmpty) { + if (that.knownSize == 0) LazyList.empty + else LazyList.continually(thisElem) zip that + } else { + if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + else newLL(zipAllState(that.iterator, thisElem, thatElem)) + } + } + + private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + if (it.hasNext) { + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyList.continually(thisElem) zipState it }) + else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) + } else { + if (this.isEmpty) State.Empty + 
else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + } + } + + /** @inheritdoc + * + * This method is not particularly useful for a lazy list, as [[zip]] already preserves + * laziness. + * + * The `collection.LazyZip2` returned by this method preserves laziness; elements are + * only evaluated individually as needed. + */ + // just in case it can be meaningfully overridden at some point + override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + super.lazyZip(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + (map(asPair(_)._1), map(asPair(_)._2)) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all except the first `n` elements. + */ + override def drop(n: Int): LazyList[A] = + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else LazyList.dropImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all elements after the predicate returns `false`. 
+ */ + override def dropWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else LazyList.dropWhileImpl(this, p) + + /** @inheritdoc + * + * $initiallyLazy + */ + override def dropRight(n: Int): LazyList[A] = { + if (n <= 0) this + else if (knownIsEmpty) LazyList.empty + else newLL { + var scout = this + var remaining = n + // advance scout n elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + remaining -= 1 + scout = scout.tail + } + dropRightState(scout) + } + } + + private def dropRightState(scout: LazyList[_]): State[A] = + if (scout.isEmpty) State.Empty + else sCons(head, newLL(tail.dropRightState(scout.tail))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def take(n: Int): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeImpl(n): @inline) + + private def takeImpl(n: Int): LazyList[A] = { + if (n <= 0) LazyList.empty + else newLL { + if (isEmpty) State.Empty + else sCons(head, tail.takeImpl(n - 1)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def takeWhile(p: A => Boolean): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else (takeWhileImpl(p): @inline) + + private def takeWhileImpl(p: A => Boolean): LazyList[A] = + newLL { + if (isEmpty || !p(head)) State.Empty + else sCons(head, tail.takeWhileImpl(p)) + } + + /** @inheritdoc + * + * $initiallyLazy + */ + override def takeRight(n: Int): LazyList[A] = + if (n <= 0 || knownIsEmpty) LazyList.empty + else LazyList.takeRightImpl(this, n) + + /** @inheritdoc + * + * $initiallyLazy + * Additionally, it preserves laziness for all but the first `from` elements. 
+ */ + override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + + /** @inheritdoc + * + * $evaluatesAllElements + */ + override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + + // need contravariant type B to make the compiler happy - still returns LazyList[A] + @tailrec + private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + if (isEmpty) tl + else tail.reverseOnto(newLL(sCons(head, tl))) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.diff(that) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = + if (knownIsEmpty) LazyList.empty + else super.intersect(that) + + @tailrec + private def lengthGt(len: Int): Boolean = + if (len < 0) true + else if (isEmpty) false + else tail.lengthGt(len - 1) + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * a single element ahead of the iterator is evaluated. + */ + override def grouped(size: Int): Iterator[LazyList[A]] = { + require(size > 0, "size must be positive, but was " + size) + slidingImpl(size = size, step = size) + } + + /** @inheritdoc + * + * The iterator returned by this method mostly preserves laziness; + * `size - step max 1` elements ahead of the iterator are evaluated. 
+ */ + override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") + slidingImpl(size = size, step = step) + } + + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + if (knownIsEmpty) Iterator.empty + else new SlidingIterator[A](this, size = size, step = step) + + /** @inheritdoc + * + * $preservesLaziness + */ + override def padTo[B >: A](len: Int, elem: B): LazyList[B] = { + if (len <= 0) this + else newLL { + if (isEmpty) LazyList.fill(len)(elem).state + else sCons(head, tail.padTo(len - 1, elem)) + } + } + + /** @inheritdoc + * + * $preservesLaziness + */ + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + if (knownIsEmpty) LazyList from other + else patchImpl(from, other, replaced) + + private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + newLL { + if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) + else if (isEmpty) stateFromIterator(other.iterator) + else sCons(head, tail.patchImpl(from - 1, other, replaced)) + } + + /** @inheritdoc + * + * $evaluatesAllElements + */ + // overridden just in case a lazy implementation is developed at some point + override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + + /** @inheritdoc + * + * $preservesLaziness + */ + override def updated[B >: A](index: Int, elem: B): LazyList[B] = + if (index < 0) throw new IndexOutOfBoundsException(s"$index") + else updatedImpl(index, elem, index) + + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + newLL { + if (index <= 0) sCons(elem, tail) + else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) + else sCons(head, tail.updatedImpl(index - 1, elem, startIndex)) + } + } + + /** Appends all elements 
of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`. + * + * $evaluatesAllElements + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + force + addStringNoForce(sb.underlying, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = { + b.append(start) + if (!stateDefined) b.append("") + else if (!isEmpty) { + b.append(head) + var cursor = this + @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + var scout = tail + @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { + cursor = scout + if (scoutNonEmpty) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) { + appendCursorElement() + cursor = cursor.tail + scout = scout.tail + if (scoutNonEmpty) scout = scout.tail + } + } + } + if (!scoutNonEmpty) { // Not a cycle, scout hit an end + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + // if cursor (eq scout) has state defined, it is empty; else unknown state + if (!cursor.stateDefined) b.append(sep).append("") + } else { + @inline def same(a: LazyList[A], 
b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + var k = 0 + while (!same(runner, scout)) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). + if (same(cursor, scout) && (k > 0)) { + appendCursorElement() + cursor = cursor.tail + } + while (!same(cursor, scout)) { + appendCursorElement() + cursor = cursor.tail + } + b.append(sep).append("") + } + } + b.append(end) + } + + /** $preservesLaziness + * + * @return a string representation of this collection. An undefined state is + * represented with `"<not computed>"` and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * a cycle at the fourth element. 
+ */ + override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString + + /** @inheritdoc + * + * $preservesLaziness + */ + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + override def hasDefiniteSize: Boolean = { + if (!stateDefined) false + else if (isEmpty) true + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. + var those = this + var these = tail + while (those ne these) { + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (!these.stateDefined) return false + else if (these.isEmpty) return true + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +/** + * $factoryInfo + * @define coll lazy list + * @define Coll `LazyList` + */ +@SerialVersionUID(3L) +object LazyList extends SeqFactory[LazyList] { + // Eagerly evaluate cached empty instance + private[this] val _empty = newLL(State.Empty).force + + private sealed trait State[+A] extends Serializable { + def head: A + def tail: LazyList[A] + } + + private object State { + @SerialVersionUID(3L) + object Empty extends State[Nothing] { + def head: Nothing = throw new NoSuchElementException("head of empty lazy list") + def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + } + + @SerialVersionUID(3L) + final class Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + } + + /** Creates a new LazyList. */ + @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + + /** Creates a new State.Cons. 
*/ + @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + + private val anyToMarker: Any => Any = _ => Statics.pfMarker + + /* All of the following `Impl` methods are carefully written so as not to + * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently + * leaking the head of the `LazyList`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * can continue their execution where they left off. + */ + + private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var elem: A = null.asInstanceOf[A] + var found = false + var rest = restRef // var rest = restRef.elem + while (!found && !rest.isEmpty) { + elem = rest.head + found = p(elem) != isFlipped + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty + } + } + + private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + val marker = Statics.pfMarker + val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased + + var res: B = marker.asInstanceOf[B] // safe because B is unbounded + var rest = restRef // var rest = restRef.elem + while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) { + res = pf.applyOrElse(rest.head, toMarker) + rest = rest.tail + restRef = rest // restRef.elem = rest + } + if (res.asInstanceOf[AnyRef] eq marker) State.Empty + else sCons(res, collectImpl(rest, pf)) + } + } + + private def 
flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var it: Iterator[B] = null + var itHasNext = false + var rest = restRef // var rest = restRef.elem + while (!itHasNext && !rest.isEmpty) { + it = f(rest.head).iterator + itHasNext = it.hasNext + if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw + rest = rest.tail + restRef = rest // restRef.elem = rest + } + } + if (itHasNext) { + val head = it.next() + rest = rest.tail + restRef = rest // restRef.elem = rest + sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state))) + } else State.Empty + } + } + + private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var iRef = n // val iRef = new IntRef(n) + newLL { + var rest = restRef // var rest = restRef.elem + var i = iRef // var i = iRef.elem + while (i > 0 && !rest.isEmpty) { + rest = rest.tail + restRef = rest // restRef.elem = rest + i -= 1 + iRef = i // iRef.elem = i + } + rest.state + } + } + + private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + newLL { + var rest = restRef // var rest = restRef.elem + while (!rest.isEmpty && p(rest.head)) { + rest = rest.tail + restRef = rest // restRef.elem = rest + } + rest.state + } + } + + private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD + var restRef = ll // val restRef = new ObjectRef(ll) + var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var remainingRef = n // val remainingRef = new IntRef(n) + newLL { + var scout = scoutRef // var scout = 
scoutRef.elem + var remaining = remainingRef // var remaining = remainingRef.elem + // advance `scout` `n` elements ahead (or until empty) + while (remaining > 0 && !scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + remaining -= 1 + remainingRef = remaining // remainingRef.elem = remaining + } + var rest = restRef // var rest = restRef.elem + // advance `rest` and `scout` in tandem until `scout` reaches the end + while(!scout.isEmpty) { + scout = scout.tail + scoutRef = scout // scoutRef.elem = scout + rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail + restRef = rest // restRef.elem = rest + } + // `rest` is the last `n` elements (or all of them) + rest.state + } + } + + /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + */ + object cons { + /** A lazy list consisting of a given first element and remaining elements + * @param hd The first element of the result lazy list + * @param tl The remaining elements of the result lazy list + */ + def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + + /** Maps a lazy list to its head and tail */ + def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + } + + implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { + /** Construct a LazyList consisting of a given first element followed by elements + * from another LazyList. + */ + def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) + /** Construct a LazyList consisting of the concatenation of the given LazyList and + * another LazyList. 
+ */ + def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + if (!s.isEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { + case lazyList: LazyList[A] => lazyList + case _ if coll.knownSize == 0 => empty[A] + case _ => newLL(stateFromIterator(coll.iterator)) + } + + def empty[A]: LazyList[A] = _empty + + /** Creates a State from an Iterator, with another State appended after the Iterator + * is empty. + */ + private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) + else suffix + + /** Creates a State from an IterableOnce. */ + private def stateFromIterator[A](it: Iterator[A]): State[A] = + if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) + else State.Empty + + override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + if (xss.knownSize == 0) empty + else newLL(concatIterator(xss.iterator)) + + private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + if (!it.hasNext) State.Empty + else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) + + /** An infinite LazyList that repeatedly applies a given function to a start value. + * + * @param start the start value of the LazyList + * @param f the function that's repeatedly applied + * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: => A)(f: A => A): LazyList[A] = + newLL { + val head = start + sCons(head, iterate(f(head))(f)) + } + + /** + * Create an infinite LazyList starting at `start` and incrementing by + * step `step`. 
+ * + * @param start the start value of the LazyList + * @param step the increment value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int, step: Int): LazyList[Int] = + newLL(sCons(start, from(start + step, step))) + + /** + * Create an infinite LazyList starting at `start` and incrementing by `1`. + * + * @param start the start value of the LazyList + * @return the LazyList starting at value `start`. + */ + def from(start: Int): LazyList[Int] = from(start, 1) + + /** + * Create an infinite LazyList containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting LazyList + * @return the LazyList containing an infinite number of elem + */ + def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + + override def fill[A](n: Int)(elem: => A): LazyList[A] = + if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty + + override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { + def at(index: Int): LazyList[A] = + if (index < n) newLL(sCons(f(index), at(index + 1))) else empty + + at(0) + } + + // significantly simpler than the iterator returned by Iterator.unfold + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + newLL { + f(init) match { + case Some((elem, state)) => sCons(elem, unfold(state)(f)) + case None => State.Empty + } + } + + /** The builder returned by this method only evaluates elements + * of collections added to it as needed. + * + * @tparam A the type of the ${coll}’s elements + * @return A builder for $Coll objects. 
+ */ + def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + + private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + override def hasNext: Boolean = !lazyList.isEmpty + + override def next(): A = + if (lazyList.isEmpty) Iterator.empty.next() + else { + val res = lazyList.head + lazyList = lazyList.tail + res + } + } + + private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) + extends AbstractIterator[LazyList[A]] { + private val minLen = size - step max 0 + private var first = true + + def hasNext: Boolean = + if (first) !lazyList.isEmpty + else lazyList.lengthGt(minLen) + + def next(): LazyList[A] = { + if (!hasNext) Iterator.empty.next() + else { + first = false + val list = lazyList + lazyList = list.drop(step) + list.take(size) + } + } + } + + private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) + extends collection.WithFilter[A, LazyList] { + private[this] val filtered = lazyList.filter(p) + def map[B](f: A => B): LazyList[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + } + + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + import LazyBuilder._ + + private[this] var next: DeferredState[A] = _ + private[this] var list: LazyList[A] = _ + + clear() + + override def clear(): Unit = { + val deferred = new DeferredState[A] + list = newLL(deferred.eval()) + next = deferred + } + + override def result(): LazyList[A] = { + next init State.Empty + list + } + + override def addOne(elem: A): this.type = { + val deferred = new DeferredState[A] + next init sCons(elem, newLL(deferred.eval())) + next = deferred + this + } + + // lazy implementation which doesn't evaluate the collection being added + 
override def addAll(xs: IterableOnce[A]): this.type = { + if (xs.knownSize != 0) { + val deferred = new DeferredState[A] + next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next = deferred + } + this + } + } + + private object LazyBuilder { + final class DeferredState[A] { + private[this] var _state: () => State[A] = _ + + def eval(): State[A] = { + val state = _state + if (state == null) throw new IllegalStateException("uninitialized") + state() + } + + // racy + def init(state: => State[A]): Unit = { + if (_state != null) throw new IllegalStateException("already initialized") + _state = () => state + } + } + } + + /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization + * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. 
+ */ + @SerialVersionUID(3L) + final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while (these.knownNonEmpty) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new mutable.ListBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[LazyList[A]] + // scala/scala#10118: caution that no code path can evaluate `tail.state` + // before the resulting LazyList is returned + val it = init.toList.iterator + coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) + } + + private[this] def readResolve(): Any = coll + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/List.scala b/tests/pos-special/stdlib/collection/immutable/List.scala index 6a305f4ebdec..5358922752fb 100644 --- a/tests/pos-special/stdlib/collection/immutable/List.scala +++ b/tests/pos-special/stdlib/collection/immutable/List.scala @@ -14,12 +14,11 @@ package scala package collection package immutable -import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} +import scala.annotation.unchecked.uncheckedVariance import scala.annotation.tailrec import mutable.{Builder, ListBuffer} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence -import language.experimental.captureChecking /** A class for immutable linked lists representing ordered collections * of elements of type `A`. 
@@ -144,7 +143,7 @@ sealed abstract class List[+A] override def prepended[B >: A](elem: B): List[B] = elem :: this - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): List[B] = prefix match { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): List[B] = prefix match { case xs: List[B] => xs ::: this case _ if prefix.knownSize == 0 => this case b: ListBuffer[B] if this.isEmpty => b.toList @@ -166,7 +165,7 @@ sealed abstract class List[+A] } // When calling appendAll with another list `suffix`, avoid copying `suffix` - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): List[B] = suffix match { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): List[B] = suffix match { case xs: List[B] => this ::: xs case _ => super.appendedAll(suffix) } @@ -215,7 +214,7 @@ sealed abstract class List[+A] // dropRight is inherited from LinearSeq override def splitAt(n: Int): (List[A], List[A]) = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var i = 0 var these = this while (!these.isEmpty && i < n) { @@ -258,7 +257,7 @@ sealed abstract class List[+A] } } - final override def collect[B](pf: PartialFunction[A, B]^): List[B] = { + final override def collect[B](pf: PartialFunction[A, B]): List[B] = { if (this eq Nil) Nil else { var rest = this var h: ::[B] = null @@ -286,7 +285,7 @@ sealed abstract class List[+A] } } - final override def flatMap[B](f: A => IterableOnce[B]^): List[B] = { + final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { var rest = this var h: ::[B] = null var t: ::[B] = null @@ -307,7 +306,7 @@ sealed abstract class List[+A] } @inline final override def takeWhile(p: A => Boolean): List[A] = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -317,7 +316,7 @@ sealed abstract class List[+A] } @inline final override def span(p: A => Boolean): 
(List[A], List[A]) = { - val b = new ListBuffer[A @uncheckedCaptures] + val b = new ListBuffer[A] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -652,7 +651,7 @@ sealed abstract class List[+A] // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or // before a newly-allocated, thread-local :: instance is aliased (e.g. in ListBuffer.toList) -final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance @uncheckedCaptures]) // sound because `next` is used only locally +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally extends List[A] { releaseFence() override def headOption: Some[A] = Some(head) @@ -667,7 +666,7 @@ case object Nil extends List[Nothing] { override def init: Nothing = throw new UnsupportedOperationException("init of empty list") override def knownSize: Int = 0 override def iterator: Iterator[Nothing] = Iterator.empty - override def unzip[A1, A2](implicit asPair: Nothing -> (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + override def unzip[A1, A2](implicit asPair: Nothing => (A1, A2)): (List[A1], List[A2]) = EmptyUnzip @transient private[this] val EmptyUnzip = (Nil, Nil) @@ -682,9 +681,9 @@ case object Nil extends List[Nothing] { object List extends StrictOptimizedSeqFactory[List] { private val TupleOfNil = (Nil, Nil) - def from[B](coll: collection.IterableOnce[B]^): List[B] = Nil.prependedAll(coll) + def from[B](coll: collection.IterableOnce[B]): List[B] = Nil.prependedAll(coll) - def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A @uncheckedCaptures]() + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer() def empty[A]: List[A] = Nil diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala new file mode 100644 index 000000000000..4a2b8dbd807c --- /dev/null +++ 
b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -0,0 +1,371 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.mutable.ReusableBuilder +import scala.collection.generic.DefaultSerializable +import scala.runtime.Statics.releaseFence +import scala.util.hashing.MurmurHash3 + +/** + * This class implements immutable maps using a list-based data structure. List map iterators and + * traversal methods visit key-value pairs in the order they were first inserted. + * + * Entries are stored internally in reversed insertion order, which means the newest key is at the + * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` + * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes + * this collection suitable only for a small number of elements. + * + * Instances of `ListMap` represent empty maps; they can be either created by calling the + * constructor directly, or by applying the function `ListMap.empty`. 
+ * + * @tparam K the type of the keys contained in this list map + * @tparam V the type of the values associated with the keys + * + * @define Coll ListMap + * @define coll list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListMap[K, +V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + override def size: Int = 0 + + override def isEmpty: Boolean = true + + override def knownSize: Int = 0 + def get(key: K): Option[V] = None + + def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this) + + def removed(key: K): ListMap[K, V] = this + + def iterator: Iterator[(K, V)] = { + var curr: ListMap[K, V] = this + var res: List[(K, V)] = Nil + while (curr.nonEmpty) { + res = (curr.key, curr.value) :: res + curr = curr.next + } + res.iterator + } + + override def keys: Iterable[K] = { + var curr: ListMap[K, V] = this + var res: List[K] = Nil + while (curr.nonEmpty) { + res = curr.key :: res + curr = curr.next + } + res + } + + override def hashCode(): Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration + // order be reversing the list first. But mapHash is symmetric so the reversed order is fine here. 
+ val _reversed = new immutable.AbstractMap[K, V] { + override def isEmpty: Boolean = ListMap.this.isEmpty + override def removed(key: K): Map[K, V] = ListMap.this.removed(key) + override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value) + override def get(key: K): Option[V] = ListMap.this.get(key) + override def iterator: Iterator[(K, V)] = ListMap.this.iterator + override def foreachEntry[U](f: (K, V) => U): Unit = { + var curr: ListMap[K, V] = ListMap.this + while (curr.nonEmpty) { + f(curr.key, curr.value) + curr = curr.next + } + } + } + MurmurHash3.mapHash(_reversed) + } + } + + private[immutable] def key: K = throw new NoSuchElementException("key of empty map") + private[immutable] def value: V = throw new NoSuchElementException("value of empty map") + private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map") + + override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op) + override protected[this] def className = "ListMap" + +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list map with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] + * section on `List Maps` for more information. + * @define Coll ListMap + * @define coll list map + */ +@SerialVersionUID(3L) +object ListMap extends MapFactory[ListMap] { + /** + * Represents an entry in the `ListMap`. 
+ */ + private[immutable] final class Node[K, V]( + override private[immutable] val key: K, + private[immutable] var _value: V, + private[immutable] var _init: ListMap[K, V] + ) extends ListMap[K, V] { + releaseFence() + + override private[immutable] def value: V = _value + + override def size: Int = sizeInternal(this, 0) + + @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int = + if (cur.isEmpty) acc + else sizeInternal(cur.next, acc + 1) + + override def isEmpty: Boolean = false + + override def knownSize: Int = -1 + + @throws[NoSuchElementException] + override def apply(k: K): V = applyInternal(this, k) + + @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V = + if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) + else if (k == cur.key) cur.value + else applyInternal(cur.next, k) + + override def get(k: K): Option[V] = getInternal(this, k) + + @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] = + if (cur.isEmpty) None + else if (k == cur.key) Some(cur.value) + else getInternal(cur.next, k) + + override def contains(k: K): Boolean = containsInternal(this, k) + + @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean = + if (cur.isEmpty) false + else if (k == cur.key) true + else containsInternal(cur.next, k) + + override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = { + + var index = -1 // the index (in reverse) where the key to update exists, if it is found + var found = false // true if the key is found int he map + var isDifferent = false // true if the key was found and the values are different + + { + var curr: ListMap[K, V] = this + + while (curr.nonEmpty && !found) { + if (k == curr.key) { + found = true + isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef] + } + index += 1 + curr = curr.init + } + } + + if (found) { + if (isDifferent) { + var newHead: ListMap.Node[K, V1] = null + var prev: ListMap.Node[K, V1] = null + 
var curr: ListMap[K, V1] = this + var i = 0 + while (i < index) { + val temp = new ListMap.Node(curr.key, curr.value, null) + if (prev ne null) { + prev._init = temp + } + prev = temp + curr = curr.init + if (newHead eq null) { + newHead = prev + } + i += 1 + } + val newNode = new ListMap.Node(curr.key, v, curr.init) + if (prev ne null) { + prev._init = newNode + } + releaseFence() + if (newHead eq null) newNode else newHead + } else { + this + } + } else { + new ListMap.Node(k, v, this) + } + } + + @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] = + if (cur.isEmpty) acc.last + else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) } + else removeInternal(k, cur.next, cur :: acc) + + override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil) + + override private[immutable] def next: ListMap[K, V] = _init + + override def last: (K, V) = (key, value) + override def init: ListMap[K, V] = next + + } + + def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]] + + private object EmptyListMap extends ListMap[Any, Nothing] + + def from[K, V](it: collection.IterableOnce[(K, V)]): ListMap[K, V] = + it match { + case lm: ListMap[K, V] => lm + case lhm: collection.mutable.LinkedHashMap[K, V] => + // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each + // key-value pair + var current: ListMap[K, V] = empty[K, V] + var firstEntry = lhm._firstEntry + while (firstEntry ne null) { + current = new Node(firstEntry.key, firstEntry.value, current) + firstEntry = firstEntry.later + } + current + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end + var current: ListMap[K, V] = empty[K, V] + val iter = it.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + current = new Node(k, v, 
current) + } + current + + case _ => (newBuilder[K, V] ++= it).result() + } + + /** Returns a new ListMap builder + * + * The implementation safely handles additions after `result()` without calling `clear()` + * + * @tparam K the map key type + * @tparam V the map value type + */ + def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V] + + @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = { + if (map.isEmpty) prevValue + else foldRightInternal(map.init, op(map.last, prevValue), op) + } +} + +/** Builder for ListMap. + * $multipleResults + */ +private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { + private[this] var isAliased: Boolean = false + private[this] var underlying: ListMap[K, V] = ListMap.empty + + override def clear(): Unit = { + underlying = ListMap.empty + isAliased = false + } + + override def result(): ListMap[K, V] = { + isAliased = true + releaseFence() + underlying + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + + @tailrec + private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match { + case n: ListMap.Node[K, V] => + if (n.key == key) { + n._value = value + true + } else { + insertValueAtKeyReturnFound(n.init, key, value) + } + case _ => false + } + + def addOne(key: K, value: V): this.type = { + if (isAliased) { + underlying = underlying.updated(key, value) + } else { + if (!insertValueAtKeyReturnFound(underlying, key, value)) { + underlying = new ListMap.Node(key, value, underlying) + } + } + this + } + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + if (isAliased) { + super.addAll(xs) + } else if (underlying.nonEmpty) { + xs match { + case m: collection.Map[K, V] => + // if it is a map, then its keys will not collide with themselves. 
+ // therefor we only need to check the already-existing elements for collisions. + // No need to check the entire list + + val iter = m.iterator + var newUnderlying = underlying + while (iter.hasNext) { + val next = iter.next() + if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) { + newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying) + } + } + underlying = newUnderlying + this + + case _ => + super.addAll(xs) + } + } else xs match { + case lhm: collection.mutable.LinkedHashMap[K, V] => + // special-casing LinkedHashMap avoids creating of Iterator and tuples for each key-value + var firstEntry = lhm._firstEntry + while (firstEntry ne null) { + underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying) + firstEntry = firstEntry.later + } + this + + case _: collection.Map[K, V] | _: collection.MapView[K, V] => + val iter = xs.iterator + while (iter.hasNext) { + val (k, v) = iter.next() + underlying = new ListMap.Node(k, v, underlying) + } + + this + case _ => + super.addAll(xs) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala new file mode 100644 index 000000000000..e2ab0de858da --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import mutable.{Builder, ImmutableBuilder} +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable + +/** + * This class implements immutable sets using a list-based data structure. 
List set iterators and + * traversal methods visit elements in the order they were first inserted. + * + * Elements are stored internally in reversed insertion order, which means the newest element is at + * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and + * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which + * makes this collection suitable only for a small number of elements. + * + * Instances of `ListSet` represent empty sets; they can be either created by calling the + * constructor directly, or by applying the function `ListSet.empty`. + * + * @tparam A the type of the elements contained in this list set + * + * @define Coll ListSet + * @define coll list set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class ListSet[A] + extends AbstractSet[A] + with StrictOptimizedSetOps[A, ListSet, ListSet[A]] + with IterableFactoryDefaults[A, ListSet] + with DefaultSerializable { + + override protected[this] def className: String = "ListSet" + + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + + def contains(elem: A): Boolean = false + + def incl(elem: A): ListSet[A] = new Node(elem) + def excl(elem: A): ListSet[A] = this + + def iterator: scala.collection.Iterator[A] = { + var curr: ListSet[A] = this + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next + } + res.iterator + } + + protected def elem: A = throw new NoSuchElementException("elem of empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") + + override def iterableFactory: IterableFactory[ListSet] = ListSet + + /** + * Represents an entry in the `ListSet`. 
+ */ + protected class Node(override protected val elem: A) extends ListSet[A] { + + override def size = sizeInternal(this, 0) + override def knownSize: Int = -1 + @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = + if (n.isEmpty) acc + else sizeInternal(n.next, acc + 1) + + override def isEmpty: Boolean = false + + override def contains(e: A): Boolean = containsInternal(this, e) + + @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = + !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) + + override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e) + + override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem)) + else removeInternal(k, cur.next, cur :: acc) + + override protected def next: ListSet[A] = ListSet.this + + override def last: A = elem + + override def init: ListSet[A] = next + } +} + +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. 
+ * + * @define Coll ListSet + * @define coll list set + */ +@SerialVersionUID(3L) +object ListSet extends IterableFactory[ListSet] { + + def from[E](it: scala.collection.IterableOnce[E]): ListSet[E] = + it match { + case ls: ListSet[E] => ls + case _ if it.knownSize == 0 => empty[E] + case _ => (newBuilder[E] ++= it).result() + } + + private object EmptyListSet extends ListSet[Any] { + override def knownSize: Int = 0 + } + private[collection] def emptyInstance: ListSet[Any] = EmptyListSet + + def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]] + + def newBuilder[A]: Builder[A, ListSet[A]] = + new ImmutableBuilder[A, ListSet[A]](empty) { + def addOne(elem: A): this.type = { elems = elems + elem; this } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala new file mode 100644 index 000000000000..c418dc7616ac --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala @@ -0,0 +1,490 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import java.lang.IllegalStateException + +import scala.collection.generic.{BitOperations, DefaultSerializationProxy} +import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.language.implicitConversions + +/** Utility class for long maps. 
+ */ +private[immutable] object LongMapUtils extends BitOperations.Long { + def branchMask(i: Long, j: Long) = highestOneBit(i ^ j) + + def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = { + val m = branchMask(p1, p2) + val p = mask(p1, m) + if (zero(p1, m)) LongMap.Bin(p, m, t1, t2) + else LongMap.Bin(p, m, t2, t1) + } + + def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match { + case (left, LongMap.Nil) => left + case (LongMap.Nil, right) => right + case (left, right) => LongMap.Bin(prefix, mask, left, right) + } +} + +import LongMapUtils._ + +/** A companion object for long maps. + * + * @define Coll `LongMap` + */ +object LongMap { + def empty[T]: LongMap[T] = LongMap.Nil + def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value) + def apply[T](elems: (Long, T)*): LongMap[T] = + elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + + def from[V](coll: IterableOnce[(Long, V)]): LongMap[V] = + newBuilder[V].addAll(coll).result() + + def newBuilder[V]: Builder[(Long, V), LongMap[V]] = + new ImmutableBuilder[(Long, V), LongMap[V]](empty) { + def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this } + } + + private[immutable] case object Nil extends LongMap[Nothing] { + // Important, don't remove this! See IntMap for explanation. 
+ override def equals(that : Any) = that match { + case _: this.type => true + case _: LongMap[_] => false // The only empty LongMaps are eq Nil + case _ => super.equals(that) + } + } + + private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] + else LongMap.Tip(key, s) + } + + private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { + def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] + else LongMap.Bin[S](prefix, mask, left, right) + } + } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} + +// Iterator over a non-empty LongMap. 
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] { + + // Basically this uses a simple stack to emulate conversion over the tree. However + // because we know that Longs are only 64 bits we can have at most 64 LongMap.Bins and + // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 65 + var index = 0 + var buffer = new Array[AnyRef](65) + + def pop() = { + index -= 1 + buffer(index).asInstanceOf[LongMap[V]] + } + + def push(x: LongMap[V]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + push(it) + + /** + * What value do we assign to a tip? + */ + def valueOf(tip: LongMap.Tip[V]): T + + def hasNext = index != 0 + @tailrec + final def next(): T = + pop() match { + case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => { + push(right) + valueOf(t) + } + case LongMap.Bin(_, _, left, right) => { + push(right) + push(left) + next() + } + case t@LongMap.Tip(_, _) => valueOf(t) + // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap + // and don't return an LongMapIterator for LongMap.Nil. + case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees") + } +} + +private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){ + def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value) +} + +private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.value +} + +private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){ + def valueOf(tip: LongMap.Tip[V]) = tip.key +} + +/** + * Specialised immutable map structure for long keys, based on + * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] + * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. 
+ * + * Note: This class is as of 2.8 largely superseded by HashMap. + * + * @tparam T type of the values associated with the long keys. + * + * @define Coll `immutable.LongMap` + * @define coll immutable long integer map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed abstract class LongMap[+T] extends AbstractMap[Long, T] + with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] + with Serializable { + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T)] @uncheckedVariance): LongMap[T] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance = + new ImmutableBuilder[(Long, T), LongMap[T]](empty) { + def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this } + } + + override def empty: LongMap[T] = LongMap.Nil + + override def toList = { + val buffer = new ListBuffer[(Long, T)] + foreach(buffer += _) + buffer.toList + } + + /** + * Iterator over key, value pairs of the map in unsigned order of the keys. + * + * @return an iterator over pairs of long keys and corresponding values. + */ + def iterator: Iterator[(Long, T)] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapEntryIterator(this) + } + + /** + * Loops over the key, value pairs of the map in unsigned order of the keys. 
+ */ + override final def foreach[U](f: ((Long, T)) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) } + case LongMap.Tip(key, value) => f((key, value)) + case LongMap.Nil => + } + + override final def foreachEntry[U](f: (Long, T) => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) } + case LongMap.Tip(key, value) => f(key, value) + case LongMap.Nil => + } + + override def keysIterator: Iterator[Long] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapKeyIterator(this) + } + + /** + * Loop over the keys of the map. The same as keys.foreach(f), but may + * be more efficient. + * + * @param f The loop body + */ + final def foreachKey[U](f: Long => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) } + case LongMap.Tip(key, _) => f(key) + case LongMap.Nil => + } + + override def valuesIterator: Iterator[T] = this match { + case LongMap.Nil => Iterator.empty + case _ => new LongMapValueIterator(this) + } + + /** + * Loop over the values of the map. The same as values.foreach(f), but may + * be more efficient. 
+ * + * @param f The loop body + */ + final def foreachValue[U](f: T => U): Unit = this match { + case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) } + case LongMap.Tip(_, value) => f(value) + case LongMap.Nil => + } + + override protected[this] def className = "LongMap" + + override def isEmpty = this eq LongMap.Nil + override def knownSize: Int = if (isEmpty) 0 else super.knownSize + override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => { + val (newleft, newright) = (left.filter(f), right.filter(f)) + if ((left eq newleft) && (right eq newright)) this + else bin(prefix, mask, newleft, newright) + } + case LongMap.Tip(key, value) => + if (f((key, value))) this + else LongMap.Nil + case LongMap.Nil => LongMap.Nil + } + + override def transform[S](f: (Long, T) => S): LongMap[S] = this match { + case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f)) + case t@LongMap.Tip(key, value) => t.withValue(f(key, value)) + case LongMap.Nil => LongMap.Nil + } + + final override def size: Int = this match { + case LongMap.Nil => 0 + case LongMap.Tip(_, _) => 1 + case LongMap.Bin(_, _, left, right) => left.size + right.size + } + + @tailrec + final def get(key: Long): Option[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key) + case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None + case LongMap.Nil => None + } + + @tailrec + final override def getOrElse[S >: T](key: Long, default: => S): S = this match { + case LongMap.Nil => default + case LongMap.Tip(key2, value) => if (key == key2) value else default + case LongMap.Bin(prefix, mask, left, right) => + if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default) + } + + @tailrec + final override def apply(key: Long): T = this match { + case LongMap.Bin(prefix, mask, left, 
right) => if (zero(key, mask)) left(key) else right(key) + case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("Key not found") + case LongMap.Nil => throw new IllegalArgumentException("key not found") + } + + override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2) + + override def updated[S >: T](key: Long, value: S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right) + else LongMap.Bin(prefix, mask, left, right.updated(key, value)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, value) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) + } + + /** + * Updates the map, using the provided function to resolve conflicts if the key is already present. + * + * Equivalent to + * {{{ + * this.get(key) match { + * case None => this.update(key, value) + * case Some(oldvalue) => this.update(key, f(oldvalue, value) + * } + * }}} + * + * @tparam S The supertype of values in this `LongMap`. + * @param key The key to update. + * @param value The value to use if there is no conflict. + * @param f The function used to resolve conflicts. + * @return The updated map. 
+ */ + def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this) + else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right) + else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f)) + case LongMap.Tip(key2, value2) => + if (key == key2) LongMap.Tip(key, f(value2, value)) + else join(key, LongMap.Tip(key, value), key2, this) + case LongMap.Nil => LongMap.Tip(key, value) + } + + def removed(key: Long): LongMap[T] = this match { + case LongMap.Bin(prefix, mask, left, right) => + if (!hasMatch(key, prefix, mask)) this + else if (zero(key, mask)) bin(prefix, mask, left - key, right) + else bin(prefix, mask, left, right - key) + case LongMap.Tip(key2, _) => + if (key == key2) LongMap.Nil + else this + case LongMap.Nil => LongMap.Nil + } + + /** + * A combined transform and filter function. Returns an `LongMap` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ + def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match { + case LongMap.Bin(prefix, mask, left, right) => { + val newleft = left.modifyOrRemove(f) + val newright = right.modifyOrRemove(f) + if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]] + else bin(prefix, mask, newleft, newright) + } + case LongMap.Tip(key, value) => f(key, value) match { + case None => LongMap.Nil + case Some(value2) => + //hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]] + else LongMap.Tip(key, value2) + } + case LongMap.Nil => LongMap.Nil + } + + /** + * Forms a union map with that map, using the combining function to resolve conflicts. + * + * @tparam S The type of values in `that`, a supertype of values in `this`. + * @param that The map to form a union with. + * @param f The function used to resolve conflicts between two mappings. + * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`. 
+ */ + def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{ + case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that) + else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1) + else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f)) + } else if (shorter(m2, m1)){ + if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that) + else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2) + else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f)) + } + else { + if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f)) + else join(p1, this, p2, that) + } + case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x)) + case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y)) + case (LongMap.Nil, x) => x + case (x, LongMap.Nil) => x + } + + /** + * Forms the intersection of these two maps with a combining function. The + * resulting map is a map that has only keys present in both maps and has + * values produced from the original mappings by combining them with `f`. + * + * @tparam S The type of values in `that`. + * @tparam R The type of values in the resulting `LongMap`. + * @param that The map to intersect with. + * @param f The combining function. + * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`. 
+ */ + def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match { + case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) => + if (shorter(m1, m2)) { + if (!hasMatch(p2, p1, m1)) LongMap.Nil + else if (zero(p2, m1)) l1.intersectionWith(that, f) + else r1.intersectionWith(that, f) + } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f)) + else { + if (!hasMatch(p1, p2, m2)) LongMap.Nil + else if (zero(p1, m2)) this.intersectionWith(l2, f) + else this.intersectionWith(r2, f) + } + case (LongMap.Tip(key, value), that) => that.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value, value2)) + } + case (_, LongMap.Tip(key, value)) => this.get(key) match { + case None => LongMap.Nil + case Some(value2) => LongMap.Tip(key, f(key, value2, value)) + } + case (_, _) => LongMap.Nil + } + + /** + * Left biased intersection. Returns the map that has all the same mappings as this but only for keys + * which are present in the other map. + * + * @tparam R The type of values in `that`. + * @param that The map to intersect with. + * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`. 
+ */ + def intersection[R](that: LongMap[R]): LongMap[T] = + this.intersectionWith(that, (key: Long, value: T, value2: R) => value) + + def ++[S >: T](that: LongMap[S]) = + this.unionWith[S](that, (key, x, y) => y) + + @tailrec + final def firstKey: Long = this match { + case LongMap.Bin(_, _, l, r) => l.firstKey + case LongMap.Tip(k, v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + @tailrec + final def lastKey: Long = this match { + case LongMap.Bin(_, _, l, r) => r.lastKey + case LongMap.Tip(k , v) => k + case LongMap.Nil => throw new IllegalStateException("Empty set") + } + + def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = + super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such + + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(that) + + def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this) +} diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala new file mode 100644 index 000000000000..9d334893b8cc --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -0,0 +1,692 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.Map.Map4 +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** Base type of immutable Maps */ +trait Map[K, +V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]] + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault[V1 >: V](d: K => V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d) +} + +/** Base trait of immutable Maps implementations + * + * @define coll immutable map + * @define Coll `immutable.Map` + */ +trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] { + + protected def coll: C with CC[K, V] + + /** Removes a key from this map, returning a new map. + * + * @param key the key to be removed + * @return a new map without a binding for ''key'' + */ + def removed(key: K): C + + /** Alias for `removed` */ + @`inline` final def - (key: K): C = removed(key) + + @deprecated("Use -- with an explicit collection", "2.13.0") + def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * $willForceEvaluation + * + * @param keys the collection containing the removed elements. + * @return a new $coll that contains all elements of the current $coll + * except one less occurrence of each of the elements of `elems`. + */ + def removedAll(keys: IterableOnce[K]): C = keys.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for `removedAll` */ + @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys) + + /** Creates a new map obtained by updating this map with a given key/value pair. + * @param key the key + * @param value the value + * @tparam V1 the type of the added value + * @return A new map with the new key/value mapping added to this map. + */ + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). 
+ * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). + * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return A new map with the updated mapping with the key + */ + def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = { + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + + /** + * Alias for `updated` + * + * @param kv the key/value pair. + * @tparam V1 the type of the value in the key/value pair. + * @return A new map with the new binding added to this map. + */ + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + + /** This function transforms all the values of mappings contained + * in this map with function `f`. 
+ * + * @param f A function over keys and values + * @return the updated map + */ + def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) } + + override def keySet: Set[K] = new ImmutableKeySet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable { + def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem + def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this + } + +} + +trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends MapOps[K, V, CC, C] + with collection.StrictOptimizedMapOps[K, V, CC, C] + with StrictOptimizedIterableOps[(K, V), Iterable, C] { + + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]): CC[K, V1] = { + var result: CC[K, V1] = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + + +/** + * $factoryInfo + * @define coll immutable map + * @define Coll `immutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory[Map] { + + @SerialVersionUID(3L) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + def get(key: K): Option[V] = underlying.get(key) + + override def default(key: K): V = defaultValue(key) + + override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory + + def iterator: Iterator[(K, V)] = underlying.iterator + + override def isEmpty: Boolean = underlying.isEmpty + + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault(underlying.concat(xs), defaultValue) + + def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + def 
updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): Map[K, V] = + it match { + case it: Iterable[_] if it.isEmpty => empty[K, V] + case m: Map[K, V] => m + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl + + @SerialVersionUID(3L) + private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def isEmpty: Boolean = true + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def keysIterator: Iterator[Any] = Iterator.empty + override def valuesIterator: Iterator[Nothing] = Iterator.empty + def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def removed(key: Any): Map[Any, Nothing] = this + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]): Map[Any, V2] = suffix match { + case m: immutable.Map[Any, V2] => m + case _ => super.concat(suffix) + } + } + + @SerialVersionUID(3L) + final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with 
StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def isEmpty: Boolean = false + override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator: Iterator[(K, V)] = Iterator.single((key1, value1)) + override def keysIterator: Iterator[K] = Iterator.single(key1) + override def valuesIterator: Iterator[V] = Iterator.single(value1) + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map1(key1, value) + else new Map2(key1, value1, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) Map.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = + if (pred((key1, value1)) != isFlipped) this else Map.empty + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]] + else new Map1(key1, walue1) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 1 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map2[K, +V](key1: K, value1: V, key2: K, 
value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map2Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map2Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map2Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 2 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map2(key1, value, key2, value2) + else if (key == key2) new Map2(key1, value1, key2, value) + else new Map3(key1, value1, key2, value2, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map1(key2, value2) + else if (key == key2) new Map1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + 
override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1 = null.asInstanceOf[K] + var v1 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { {k1 = key1; v1 = value1}; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) {k1 = key2; v1 = value2}; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map2(key1, walue1, key2, walue2) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 2 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) + def get(key: 
K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map3Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map3Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map3Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 3 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map3(key1, value, key2, value2, key3, value3) + else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) + else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) + else new Map4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map2(key2, value2, key3, value3) + else if (key == key2) new Map2(key1, value1, key3, value3) + else if (key == key3) new Map2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) + override def forall(p: 
((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2 = null.asInstanceOf[K] + var v1, v2 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val walue3 = f(key3, value3) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map3(key1, walue1, key2, walue2, key3, walue3) + } + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 3 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } + + @SerialVersionUID(3L) + final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) + extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable { + + override def size: Int = 4 + override def knownSize: Int = 
4 + override def isEmpty: Boolean = false + override def apply(key: K): V = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] { + override protected def nextResult(k: K, v: V): (K, V) = (k, v) + } + override def keysIterator: Iterator[K] = new Map4Iterator[K] { + override protected def nextResult(k: K, v: V): K = k + } + override def valuesIterator: Iterator[V] = new Map4Iterator[V] { + override protected def nextResult(k: K, v: V): V = v + } + + private abstract class Map4Iterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + override def hasNext: Boolean = i < 4 + override def next(): A = { + val result = i match { + case 0 => nextResult(key1, value1) + case 1 => nextResult(key2, value2) + case 2 => nextResult(key3, value3) + case 3 => nextResult(key4, value4) + case _ => Iterator.empty.next() + } + i += 1 + result + } + override def drop(n: Int): Iterator[A] = { i += n; this } + protected def nextResult(k: K, v: V @uncheckedVariance): A + } + def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) 
new Map4(key1, value1, key2, value2, key3, value3, key4, value) + else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value) + def removed(key: K): Map[K, V] = + if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4)) + override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4)) + override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = { + var k1, k2, k3 = null.asInstanceOf[K] + var v1, v2, v3 = null.asInstanceOf[V] + var n = 0 + if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1} + if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1} + if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1} + if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1} + + n match { + case 0 => Map.empty + case 1 => new Map1(k1, v1) + case 2 => new Map2(k1, v1, k2, v2) + case 3 => new Map3(k1, v1, k2, v2, k3, v3) + case 4 => this + } + } + override def transform[W](f: (K, V) => W): Map[K, W] = { + val walue1 = f(key1, value1) + val walue2 = f(key2, value2) + val 
walue3 = f(key3, value3) + val walue4 = f(key4, value4) + if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) && + (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) && + (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) && + (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]] + else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4) + } + private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type = + builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4) + override def hashCode(): Int = { + import scala.util.hashing.MurmurHash3 + var a, b = 0 + val N = 4 + var c = 1 + + var h = MurmurHash3.tuple2Hash(key1, value1) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key2, value2) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key3, value3) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.tuple2Hash(key4, value4) + a += h + b ^= h + c *= h | 1 + + h = MurmurHash3.mapSeed + h = MurmurHash3.mix(h, a) + h = MurmurHash3.mix(h, b) + h = MurmurHash3.mixLast(h, c) + MurmurHash3.finalizeHash(h, N) + } + } +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] + +private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { + private[this] var elems: Map[K, V] = Map.empty + private[this] var switchedToHashMapBuilder: Boolean = false + private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + + private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = + if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) + else elems.getOrElse(key, value) + + override def clear(): Unit = { + elems = Map.empty + if (hashMapBuilder != null) { + hashMapBuilder.clear() + } + switchedToHashMapBuilder = false + } + + override def result(): Map[K, V] = + if (switchedToHashMapBuilder) hashMapBuilder.result() else elems + + def addOne(key: K, value: V): this.type = { + if (switchedToHashMapBuilder) { + hashMapBuilder.addOne(key, value) + } else if (elems.size < 4) { + elems = elems.updated(key, value) + } else { + // assert(elems.size == 4) + if (elems.contains(key)) { + elems = elems.updated(key, value) + } else { + switchedToHashMapBuilder = true + if (hashMapBuilder == null) { + hashMapBuilder = new HashMapBuilder + } + elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder) + hashMapBuilder.addOne(key, value) + } + } + + this + } + + def addOne(elem: (K, V)) = addOne(elem._1, elem._2) + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToHashMapBuilder) { + hashMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala new file mode 100644 index 000000000000..d1ee494711a7 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -0,0 +1,507 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} + +/** `NumericRange` is a more generic version of the + * `Range` class which works with arbitrary types. + * It must be supplied with an `Integral` implementation of the + * range type. + * + * Factories for likely types include `Range.BigInt`, `Range.Long`, + * and `Range.BigDecimal`. `Range.Int` exists for completeness, but + * the `Int`-based `scala.Range` should be more performant. + * + * {{{ + * val r1 = Range(0, 100, 1) + * val veryBig = Int.MaxValue.toLong + 1 + * val r2 = Range.Long(veryBig, veryBig + 100, 1) + * assert(r1 sameElements r2.map(_ - veryBig)) + * }}} + * + * @define Coll `NumericRange` + * @define coll numeric range + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed class NumericRange[T]( + val start: T, + val end: T, + val step: T, + val isInclusive: Boolean +)(implicit + num: Integral[T] +) + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] + with Serializable { self => + + override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = { + import scala.collection.convert._ + import impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length) + case StepperShape.LongShape => new LongNumericRangeStepper 
(this.asInstanceOf[NumericRange[Long]], 0, length) + case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + } + s.asInstanceOf[S with EfficientSplit] + } + + + /** Note that NumericRange must be invariant so that constructs + * such as "1L to 10 by 5" do not infer the range type as AnyVal. + */ + import num._ + + // See comment in Range for why this must be lazy. + override lazy val length: Int = NumericRange.count(start, end, step, isInclusive) + override lazy val isEmpty: Boolean = ( + (num.gt(start, end) && num.gt(step, num.zero)) + || (num.lt(start, end) && num.lt(step, num.zero)) + || (num.equiv(start, end) && !isInclusive) + ) + override def last: T = + if (isEmpty) Nil.head + else locationAfterN(length - 1) + override def init: NumericRange[T] = + if (isEmpty) Nil.init + else new NumericRange(start, end - step, step, isInclusive) + + override def head: T = if (isEmpty) Nil.head else start + override def tail: NumericRange[T] = + if (isEmpty) Nil.tail + else if(isInclusive) new NumericRange.Inclusive(start + step, end, step) + else new NumericRange.Exclusive(start + step, end, step) + + /** Create a new range with the start and end values of this range and + * a new `step`. + */ + def by(newStep: T): NumericRange[T] = copy(start, end, newStep) + + + /** Create a copy of this range. 
+ */ + def copy(start: T, end: T, step: T): NumericRange[T] = + new NumericRange(start, end, step, isInclusive) + + @throws[IndexOutOfBoundsException] + def apply(idx: Int): T = { + if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})") + else locationAfterN(idx) + } + + override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = { + var count = 0 + var current = start + while (count < length) { + f(current) + current += step + count += 1 + } + } + + // TODO: these private methods are straight copies from Range, duplicated + // to guard against any (most likely illusory) performance drop. They should + // be eliminated one way or another. + + // Tests whether a number is within the endpoints, without testing + // whether it is a member of the sequence (i.e. when step > 1.) + private def isWithinBoundaries(elem: T) = !isEmpty && ( + (step > zero && start <= elem && elem <= last ) || + (step < zero && last <= elem && elem <= start) + ) + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private def locationAfterN(n: Int): T = start + (step * fromInt(n)) + + private def crossesTheEndAfterN(n: Int): Boolean = { + // if we're sure that subtraction in the context of T won't overflow, we use this function + // to calculate the length of the range + def unsafeRangeLength(r: NumericRange[T]): T = { + val diff = num.minus(r.end, r.start) + val quotient = num.quot(diff, r.step) + val remainder = num.rem(diff, r.step) + if (!r.isInclusive && num.equiv(remainder, num.zero)) + num.max(quotient, num.zero) + else + num.max(num.plus(quotient, num.one), num.zero) + } + + // detects whether value can survive a bidirectional trip to -and then from- Int. 
+ def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value) + + val stepIsInTheSameDirectionAsStartToEndVector = + (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one) + + if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1 + + val sameSign = num.equiv(num.sign(start), num.sign(end)) + + if (sameSign) { // subtraction is safe + val len = unsafeRangeLength(this) + if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len) + } else { + // split into two ranges, in both of which subtraction is safe (around zero) + val stepsRemainderToZero = num.rem(start, step) + val walksOnZero = num.equiv(stepsRemainderToZero, num.zero) + val closestToZero = if (walksOnZero) -step else stepsRemainderToZero + + /* + When splitting into two ranges, we should be super-careful about one of the sides hitting MinValue of T, + so we take two steps smaller than zero to ensure unsafeRangeLength won't overflow (taking one step may overflow depending on the step). + Same thing happens for MaxValue from zero, so we take one step further to ensure the safety of unsafeRangeLength. + After performing such operation, there are some elements remaining in between and around zero, + whose length is represented by carry. 
+ */ + val (l: NumericRange[T], r: NumericRange[T], carry: Int) = + if (num.lt(start, num.zero)) { + if (walksOnZero) { + val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2) + } else { + (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1) + } + } else { + if (walksOnZero) { + val twoStepsAfterZero = num.times(step, num.fromInt(2)) + (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2) + } else { + val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2))) + (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2) + } + } + + val leftLength = unsafeRangeLength(l) + val rightLength = unsafeRangeLength(r) + + // instead of `n >= rightLength + leftLength + carry` which may cause addition overflow, + // this can be used `(n - leftLength - carry) >= rightLength` (Both in Int and T, depends on whether the lengths fit in Int) + if (fitsInInteger(leftLength) && fitsInInteger(rightLength)) + n - num.toInt(leftLength) - carry >= num.toInt(rightLength) + else + num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength) + } + } + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. 
+ private def newEmptyRange(value: T) = NumericRange(value, value, step) + + override def take(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (crossesTheEndAfterN(n)) this + else new NumericRange.Inclusive(start, locationAfterN(n - 1), step) + } + + override def drop(n: Int): NumericRange[T] = { + if (n <= 0 || isEmpty) this + else if (crossesTheEndAfterN(n)) newEmptyRange(end) + else copy(locationAfterN(n), end, step) + } + + override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) + + override def reverse: NumericRange[T] = + if (isEmpty) this + else { + val newStep = -step + if (num.sign(newStep) == num.sign(step)) { + throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") + } else new NumericRange.Inclusive(last, start, newStep) + } + + import NumericRange.defaultOrdering + + override def min[T1 >: T](implicit ord: Ordering[T1]): T = + // We can take the fast path: + // - If the Integral of this NumericRange is also the requested Ordering + // (Integral <: Ordering). This can happen for custom Integral types. + // - The Ordering is the default Ordering of a well-known Integral type. + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) head + else last + } else super.min(ord) + + override def max[T1 >: T](implicit ord: Ordering[T1]): T = + // See comment for fast path in min(). + if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) { + if (num.sign(step) > zero) last + else head + } else super.max(ord) + + // a well-typed contains method. 
+ def containsTyped(x: T): Boolean = + isWithinBoundaries(x) && (((x - start) % step) == zero) + + override def contains[A1 >: T](x: A1): Boolean = + try containsTyped(x.asInstanceOf[T]) + catch { case _: ClassCastException => false } + + override def sum[B >: T](implicit num: Numeric[B]): B = { + if (isEmpty) num.zero + else if (size == 1) head + else { + // If there is no overflow, use arithmetic series formula + // a + ... (n terms total) ... + b = n*(a+b)/2 + if ((num eq scala.math.Numeric.IntIsIntegral)|| + (num eq scala.math.Numeric.ShortIsIntegral)|| + (num eq scala.math.Numeric.ByteIsIntegral)|| + (num eq scala.math.Numeric.CharIsIntegral)) { + // We can do math with no overflow in a Long--easy + val exact = (size * ((num toLong head) + (num toInt last))) / 2 + num fromInt exact.toInt + } + else if (num eq scala.math.Numeric.LongIsIntegral) { + // Uh-oh, might be overflow, so we have to divide before we overflow. + // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying + val a = head.toLong + val b = last.toLong + val ans = + if ((size & 1) == 0) (size / 2) * (a + b) + else size * { + // Sum is even, but we might overflow it, so divide in pieces and add back remainder + val ha = a/2 + val hb = b/2 + ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2 + } + ans.asInstanceOf[B] + } + else if ((num eq scala.math.Numeric.BigIntIsIntegral) || + (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) { + // No overflow, so we can use arithmetic series formula directly + // (not going to worry about running out of memory) + val numAsIntegral = num.asInstanceOf[Integral[B]] + import numAsIntegral._ + ((num fromInt size) * (head + last)) / (num fromInt 2) + } + else { + // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. 
won't work on something like Z_6) + if (isEmpty) num.zero + else { + var acc = num.zero + var i = head + var idx = 0 + while(idx < length) { + acc = num.plus(acc, i) + i = i + step + idx = idx + 1 + } + acc + } + } + } + } + + override lazy val hashCode: Int = super.hashCode() + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + override def equals(other: Any): Boolean = other match { + case x: NumericRange[_] => + (x canEqual this) && (length == x.length) && ( + (isEmpty) || // all empty sequences are equal + (start == x.start && last == x.last) // same length and same endpoints implies equality + ) + case _ => + super.equals(other) + } + + override def toString: String = { + val empty = if (isEmpty) "empty " else "" + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + s"${empty}NumericRange $start $preposition $end$stepped" + } + + override protected[this] def className = "NumericRange" +} + +/** A companion object for numeric ranges. + * @define Coll `NumericRange` + * @define coll numeric range + */ +object NumericRange { + private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = { + def FAIL(boundary: T, step: T): Unit = { + val msg = boundary match { + case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}" + case _ => "Precision" + } + throw new IllegalArgumentException( + s"$msg inadequate to represent steps of size $step near $boundary" + ) + } + if (num.minus(num.plus(start, step), start) != step) FAIL(start, step) + if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step) + } + + /** Calculates the number of elements in a range given start, end, step, and + * whether or not it is inclusive. Throws an exception if step == 0 or + * the number of elements exceeds the maximum Int. 
+ */ + def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = { + val zero = num.zero + val upward = num.lt(start, end) + val posStep = num.gt(step, zero) + + if (step == zero) throw new IllegalArgumentException("step cannot be 0.") + else if (start == end) if (isInclusive) 1 else 0 + else if (upward != posStep) 0 + else { + /* We have to be frightfully paranoid about running out of range. + * We also can't assume that the numbers will fit in a Long. + * We will assume that if a > 0, -a can be represented, and if + * a < 0, -a+1 can be represented. We also assume that if we + * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least). + * And we assume that numbers wrap rather than cap when they overflow. + */ + // Check whether we can short-circuit by deferring to Int range. + val startint = num.toInt(start) + if (start == num.fromInt(startint)) { + val endint = num.toInt(end) + if (end == num.fromInt(endint)) { + val stepint = num.toInt(step) + if (step == num.fromInt(stepint)) { + return { + if (isInclusive) Range.inclusive(startint, endint, stepint).length + else Range (startint, endint, stepint).length + } + } + } + } + // If we reach this point, deferring to Int failed. + // Numbers may be big. + if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) { + bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all) + } + val one = num.one + val limit = num.fromInt(Int.MaxValue) + def check(t: T): T = + if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") + else t + // If the range crosses zero, it might overflow when subtracted + val startside = num.sign(start) + val endside = num.sign(end) + num.toInt{ + if (num.gteq(num.times(startside, endside), zero)) { + // We're sure we can subtract these numbers. 
+ // Note that we do not use .rem because of different conventions for Long and BigInt + val diff = num.minus(end, start) + val quotient = check(num.quot(diff, step)) + val remainder = num.minus(diff, num.times(quotient, step)) + if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one)) + } + else { + // We might not even be able to subtract these numbers. + // Jump in three pieces: + // * start to -1 or 1, whichever is closer (waypointA) + // * one step, which will take us at least to 0 (ends at waypointB) + // * (except with really small numbers) + // * there to the end + val negone = num.fromInt(-1) + val startlim = if (posStep) negone else one + //Use start value if the start value is closer to zero than startlim + // * e.g. .5 is closer to zero than 1 and -.5 is closer to zero than -1 + val startdiff = { + if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start))) + start + else + num.minus(startlim, start) + } + val startq = check(num.quot(startdiff, step)) + val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step)) + val waypointB = num.plus(waypointA, step) + check { + if (num.lt(waypointB, end) != upward) { + // No last piece + if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2)) + else num.plus(startq, one) + } + else { + // There is a last piece + val enddiff = num.minus(end,waypointB) + val endq = check(num.quot(enddiff, step)) + val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step)) + // Now we have to tally up all the pieces + // 1 for the initial value + // startq steps to waypointA + // 1 step to waypointB + // endq steps to the end (one less if !isInclusive and last==end) + num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2))) + } + } + } + } + } + } + + @SerialVersionUID(3L) + class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, 
step, true) { + override def copy(start: T, end: T, step: T): Inclusive[T] = + NumericRange.inclusive(start, end, step) + + def exclusive: Exclusive[T] = NumericRange(start, end, step) + } + + @SerialVersionUID(3L) + class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) + extends NumericRange(start, end, step, false) { + override def copy(start: T, end: T, step: T): Exclusive[T] = + NumericRange(start, end, step) + + def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) + } + + def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] = + new Exclusive(start, end, step) + def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] = + new Inclusive(start, end, step) + + private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]]( + Numeric.BigIntIsIntegral -> Ordering.BigInt, + Numeric.IntIsIntegral -> Ordering.Int, + Numeric.ShortIsIntegral -> Ordering.Short, + Numeric.ByteIsIntegral -> Ordering.Byte, + Numeric.CharIsIntegral -> Ordering.Char, + Numeric.LongIsIntegral -> Ordering.Long, + Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal + ) + + @SerialVersionUID(3L) + private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable { + import num.mkNumericOps + + private[this] var _hasNext = !self.isEmpty + private[this] var _next: T = self.start + private[this] val lastElement: T = if (_hasNext) self.last else self.start + override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 + def hasNext: Boolean = _hasNext + def next(): T = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = num.plus(value, self.step) + value + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala new file mode 100644 index 000000000000..3d0f8206b6a9 
--- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala @@ -0,0 +1,217 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{Builder, ListBuffer} + +/** `Queue` objects implement data structures that allow to + * insert and retrieve elements in a first-in-first-out (FIFO) manner. + * + * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements. + * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the + * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''. + * + * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)`, except in the case + * where a pivot is required, in which case, a cost of `O(n)` is incurred, where `n` is the number of elements in the queue. When this happens, + * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] + * section on `Immutable Queues` for more information. 
+ * + * @define Coll `immutable.Queue` + * @define coll immutable queue + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ + +sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) + extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Queue] = Queue + + /** Returns the `n`-th element of this queue. + * The first element is at position `0`. + * + * @param n index of the element to return + * @return the element at position `n` in this queue. + * @throws NoSuchElementException if the queue is too short. + */ + override def apply(n: Int): A = { + def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString) + + var index = 0 + var curr = out + + while (index < n && curr.nonEmpty) { + index += 1 + curr = curr.tail + } + + if (index == n) { + if (curr.nonEmpty) curr.head + else if (in.nonEmpty) in.last + else indexOutOfRange() + } else { + val indexFromBack = n - index + val inLength = in.length + if (indexFromBack >= inLength) indexOutOfRange() + else in(inLength - indexFromBack - 1) + } + } + + /** Returns the elements in the list as an iterator + */ + override def iterator: Iterator[A] = out.iterator.concat(in.reverse) + + /** Checks if the queue is empty. + * + * @return true, iff there is no element in the queue. 
+ */ + override def isEmpty: Boolean = in.isEmpty && out.isEmpty + + override def head: A = + if (out.nonEmpty) out.head + else if (in.nonEmpty) in.last + else throw new NoSuchElementException("head on empty queue") + + override def tail: Queue[A] = + if (out.nonEmpty) new Queue(in, out.tail) + else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) + else throw new NoSuchElementException("tail on empty queue") + + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + + /* This is made to avoid inefficient implementation of iterator. */ + override def forall(p: A => Boolean): Boolean = + in.forall(p) && out.forall(p) + + /* This is made to avoid inefficient implementation of iterator. */ + override def exists(p: A => Boolean): Boolean = + in.exists(p) || out.exists(p) + + override protected[this] def className = "Queue" + + /** Returns the length of the queue. */ + override def length: Int = in.length + out.length + + override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out) + + override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) + + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { + val newIn = that match { + case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) + case that: List[B] => that reverse_::: this.in + case _ => + var result: List[B] = this.in + val iter = that.iterator + while (iter.hasNext) { + result = iter.next() :: result + } + result + } + if (newIn eq this.in) this else new Queue[B](newIn, this.out) + } + + /** Creates a new queue with element added at the end + * of the old queue. + * + * @param elem the element to insert + */ + def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. 
+ * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0") + @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter) + + /** Creates a new queue with all elements provided by an `Iterable` object + * added at the end of the old queue. + * + * The elements are appended in the order they are given out by the + * iterator. + * + * @param iter an iterable object + */ + def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter) + + /** Returns a tuple with the first element in the queue, + * and a new queue with this element removed. + * + * @throws NoSuchElementException + * @return the first element of the queue. + */ + def dequeue: (A, Queue[A]) = out match { + case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail)) + case x :: xs => (x, new Queue(in, xs)) + case _ => throw new NoSuchElementException("dequeue on empty queue") + } + + /** Optionally retrieves the first element and a queue of the remaining elements. + * + * @return A tuple of the first element of the queue, and a new queue with this element removed. + * If the queue is empty, `None` is returned. + */ + def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue) + + /** Returns the first element in the queue, or throws an error if there + * is no element contained in the queue. + * + * @throws NoSuchElementException + * @return the first element. + */ + def front: A = head + + /** Returns a string representation of this queue. 
+ */ + override def toString(): String = mkString("Queue(", ", ", ")") +} + +/** $factoryInfo + * @define Coll `immutable.Queue` + * @define coll immutable queue + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + + def from[A](source: IterableOnce[A]): Queue[A] = source match { + case q: Queue[A] => q + case _ => + val list = List.from(source) + if (list.isEmpty) empty + else new Queue(Nil, list) + } + + def empty[A]: Queue[A] = EmptyQueue + override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) + + private object EmptyQueue extends Queue[Nothing](Nil, Nil) { } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala new file mode 100644 index 000000000000..66a149840488 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -0,0 +1,672 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.RangeStepper +import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import scala.util.hashing.MurmurHash3 + +/** The `Range` class represents integer values in range + * ''[start;end)'' with non-zero step value `step`. + * It's a special case of an indexed sequence. 
+ * For example: + * + * {{{ + * val r1 = 0 until 10 + * val r2 = r1.start until r1.end by r1.step + 1 + * println(r2.length) // = 5 + * }}} + * + * Ranges that contain more than `Int.MaxValue` elements can be created, but + * these overfull ranges have only limited capabilities. Any method that + * could require a collection of over `Int.MaxValue` length to be created, or + * could be asked to index beyond `Int.MaxValue` elements will throw an + * exception. Overfull ranges can safely be reduced in size by changing + * the step size (e.g. `by 3`) or taking/dropping elements. `contains`, + * `equals`, and access to the ends of the range (`head`, `last`, `tail`, + * `init`) are also permitted on overfull ranges. + * + * @param start the start of this range. + * @param end the end of the range. For exclusive ranges, e.g. + * `Range(0,3)` or `(0 until 3)`, this is one + * step past the last one in the range. For inclusive + * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`, + * it may be in the range if it is not skipped by the step size. + * To find the last element inside a non-empty range, + * use `last` instead. + * @param step the step for the range. + * + * @define coll range + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define doesNotUseBuilders + * '''Note:''' this method does not use builders to construct a new range, + * and its complexity is O(1). 
+ */ +@SerialVersionUID(3L) +sealed abstract class Range( + val start: Int, + val end: Int, + val step: Int +) + extends AbstractSeq[Int] + with IndexedSeq[Int] + with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]] + with IterableFactoryDefaults[Int, IndexedSeq] + with Serializable { range => + + final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty) + + override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = { + val st = new RangeStepper(start, step, 0, length) + val r = + if (shape.shape == StepperShape.IntShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + private[this] def gap = end.toLong - start.toLong + private[this] def isExact = gap % step == 0 + private[this] def hasStub = isInclusive || !isExact + private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 ) + + def isInclusive: Boolean + + final override val isEmpty: Boolean = ( + (start > end && step > 0) + || (start < end && step < 0) + || (start == end && !isInclusive) + ) + + private[this] val numRangeElements: Int = { + if (step == 0) throw new IllegalArgumentException("step cannot be 0.") + else if (isEmpty) 0 + else { + val len = longLength + if (len > scala.Int.MaxValue) -1 + else len.toInt + } + } + + final def length = if (numRangeElements < 0) fail() else numRangeElements + + // This field has a sensible value only for non-empty ranges + private[this] val lastElement = step match { + case 1 => if (isInclusive) end else end-1 + case -1 => if (isInclusive) end else end+1 + case _ => + val remainder = (gap % step).toInt + if (remainder != 0) end - remainder + else if (isInclusive) end + else end - step + } + + /** The last element of this range. 
This method will return the correct value + * even if there are too many elements to iterate over. + */ + final override def last: Int = + if (isEmpty) throw Range.emptyRangeError("last") else lastElement + final override def head: Int = + if (isEmpty) throw Range.emptyRangeError("head") else start + + /** Creates a new range containing all the elements of this range except the last one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the last one. + */ + final override def init: Range = + if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1) + + /** Creates a new range containing all the elements of this range except the first one. + * + * $doesNotUseBuilders + * + * @return a new range consisting of all the elements of this range except the first one. + */ + final override def tail: Range = { + if (isEmpty) throw Range.emptyRangeError("tail") + if (numRangeElements == 1) newEmptyRange(end) + else if(isInclusive) new Range.Inclusive(start + step, end, step) + else new Range.Exclusive(start + step, end, step) + } + + override def map[B](f: Int => B): IndexedSeq[B] = { + validateMaxLength() + super.map(f) + } + + final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range = + if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step) + + /** Create a new range with the `start` and `end` values of this range and + * a new `step`. + * + * @return a new range with a different step + */ + final def by(step: Int): Range = copy(start, end, step) + + // Check cannot be evaluated eagerly because we have a pattern where + // ranges are constructed like: "x to y by z" The "x to y" piece + // should not trigger an exception. So the calculation is delayed, + // which means it will not fail fast for those cases where failing was + // correct. 
+ private[this] def validateMaxLength(): Unit = { + if (numRangeElements < 0) + fail() + } + private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step) + private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.") + + @throws[IndexOutOfBoundsException] + final def apply(idx: Int): Int = { + validateMaxLength() + if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})") + else start + (step * idx) + } + + /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = { + // Implementation chosen on the basis of favorable microbenchmarks + // Note--initialization catches step == 0 so we don't need to here + if (!isEmpty) { + var i = start + while (true) { + f(i) + if (i == lastElement) return + i += step + } + } + } + + override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos >= from) pos else -1 + case _ => super.indexOf(elem, from) + } + + override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int = + elem match { + case i: Int => + val pos = posOf(i) + if (pos <= end) pos else -1 + case _ => super.lastIndexOf(elem, end) + } + + private[this] def posOf(i: Int): Int = + if (contains(i)) (i - start) / step else -1 + + override def sameElements[B >: Int](that: IterableOnce[B]): Boolean = that match { + case other: Range => + (this.length : @annotation.switch) match { + case 0 => other.isEmpty + case 1 => other.length == 1 && this.start == other.start + case n => other.length == n && ( + (this.start == other.start) + && (this.step == other.step) + ) + } + case _ => super.sameElements(that) + } + + /** Creates a new range containing the first `n` elements of this range. + * + * @param n the number of elements to take. 
+ * @return a new range consisting of `n` first elements. + */ + final override def take(n: Int): Range = + if (n <= 0 || isEmpty) newEmptyRange(start) + else if (n >= numRangeElements && numRangeElements >= 0) this + else { + // May have more than Int.MaxValue elements in range (numRangeElements < 0) + // but the logic is the same either way: take the first n + new Range.Inclusive(start, locationAfterN(n - 1), step) + } + + /** Creates a new range containing all the elements of this range except the first `n` elements. + * + * @param n the number of elements to drop. + * @return a new range consisting of all the elements of this range except `n` first elements. + */ + final override def drop(n: Int): Range = + if (n <= 0 || isEmpty) this + else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end) + else { + // May have more than Int.MaxValue elements (numRangeElements < 0) + // but the logic is the same either way: go forwards n steps, keep the rest + copy(locationAfterN(n), end, step) + } + + /** Creates a new range consisting of the last `n` elements of the range. + * + * $doesNotUseBuilders + */ + final override def takeRight(n: Int): Range = { + if (n <= 0) newEmptyRange(start) + else if (numRangeElements >= 0) drop(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last + val x = y - step.toLong*(n-1) + if ((step > 0 && x < start) || (step < 0 && x > start)) this + else Range.inclusive(x.toInt, y, step) + } + } + + /** Creates a new range consisting of the initial `length - n` elements of the range. 
+ * + * $doesNotUseBuilders + */ + final override def dropRight(n: Int): Range = { + if (n <= 0) this + else if (numRangeElements >= 0) take(numRangeElements - n) + else { + // Need to handle over-full range separately + val y = last - step.toInt*n + if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start) + else Range.inclusive(start, y.toInt, step) + } + } + + // Advance from the start while we meet the given test + private[this] def argTakeWhile(p: Int => Boolean): Long = { + if (isEmpty) start + else { + var current = start + val stop = last + while (current != stop && p(current)) current += step + if (current != stop || !p(current)) current + else current.toLong + step + } + } + + final override def takeWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop==start) newEmptyRange(start) + else { + val x = (stop - step).toInt + if (x == last) this + else Range.inclusive(start, x, step) + } + } + + final override def dropWhile(p: Int => Boolean): Range = { + val stop = argTakeWhile(p) + if (stop == start) this + else { + val x = (stop - step).toInt + if (x == last) newEmptyRange(last) + else Range.inclusive(x + step, last, step) + } + } + + final override def span(p: Int => Boolean): (Range, Range) = { + val border = argTakeWhile(p) + if (border == start) (newEmptyRange(start), this) + else { + val x = (border - step).toInt + if (x == last) (this, newEmptyRange(last)) + else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step)) + } + } + + /** Creates a new range containing the elements starting at `from` up to but not including `until`. 
+ * + * $doesNotUseBuilders + * + * @param from the element at which to start + * @param until the element at which to end (not included in the range) + * @return a new range consisting of a contiguous interval of values in the old range + */ + final override def slice(from: Int, until: Int): Range = + if (from <= 0) take(until) + else if (until >= numRangeElements && numRangeElements >= 0) drop(from) + else { + val fromValue = locationAfterN(from) + if (from >= until) newEmptyRange(fromValue) + else Range.inclusive(fromValue, locationAfterN(until-1), step) + } + + // Overridden only to refine the return type + final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n)) + + // Methods like apply throw exceptions on invalid n, but methods like take/drop + // are forgiving: therefore the checks are with the methods. + private[this] def locationAfterN(n: Int) = start + (step * n) + + // When one drops everything. Can't ever have unchecked operations + // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue } + // will overflow. This creates an exclusive range where start == end + // based on the given value. + private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step) + + /** Returns the reverse of this range. + */ + final override def reverse: Range = + if (isEmpty) this + else new Range.Inclusive(last, start, -step) + + /** Make range inclusive. 
+ */ + final def inclusive: Range = + if (isInclusive) this + else new Range.Inclusive(start, end, step) + + final def contains(x: Int): Boolean = { + if (x == end && !isInclusive) false + else if (step > 0) { + if (x < start || x > end) false + else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0) + } + else { + if (x < end || x > start) false + else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0) + } + } + /* Seq#contains has a type parameter so the optimised contains above doesn't override it */ + override final def contains[B >: Int](elem: B): Boolean = elem match { + case i: Int => this.contains(i) + case _ => super.contains(elem) + } + + final override def sum[B >: Int](implicit num: Numeric[B]): Int = { + if (num eq scala.math.Numeric.IntIsIntegral) { + // this is normal integer range with usual addition. arithmetic series formula can be used + if (isEmpty) 0 + else if (size == 1) head + else ((size * (head.toLong + last)) / 2).toInt + } else { + // user provided custom Numeric, we cannot rely on arithmetic series formula + if (isEmpty) num.toInt(num.zero) + else { + var acc = num.zero + var i = head + while (true) { + acc = num.plus(acc, i) + if (i == lastElement) return num.toInt(acc) + i = i + step + } + 0 // Never hit this--just to satisfy compiler since it doesn't know while(true) has type Nothing + } + } + } + + final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) head + else last + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) last + else head + } else super.min(ord) + + final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int = + if (ord eq Ordering.Int) { + if (step > 0) last + else head + } else if (Ordering.Int isReverseOf ord) { + if (step > 0) head + else last + } else super.max(ord) + + override def tails: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length 
+ override def next() = { + if (hasNext) { + val res = Range.this.drop(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + + override def inits: Iterator[Range] = + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = i <= Range.this.length + override def next() = { + if (hasNext) { + val res = Range.this.dropRight(i) + i += 1 + res + } else { + Iterator.empty.next() + } + } + } + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + + final override def equals(other: Any): Boolean = other match { + case x: Range => + // Note: this must succeed for overfull ranges (length > Int.MaxValue) + if (isEmpty) x.isEmpty // empty sequences are equal + else // this is non-empty... + x.nonEmpty && start == x.start && { // ...so other must contain something and have same start + val l0 = last + (l0 == x.last && ( // And same end + start == l0 || step == x.step // And either the same step, or not take any steps + )) + } + case _ => + super.equals(other) + } + + final override def hashCode: Int = + if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement) + else super.hashCode + + final override def toString: String = { + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" + s"${prefix}Range $start $preposition $end$stepped" + } + + override protected[this] def className = "Range" + + override def distinct: Range = this + + override def grouped(size: Int): Iterator[Range] = { + require(size >= 1, f"size=$size%d, but size must be positive") + if (isEmpty) { + Iterator.empty + } else { + val s = size + new AbstractIterator[Range] { + private[this] var i = 0 + override def hasNext = Range.this.length > i + override def next() = + if (hasNext) { + val x = Range.this.slice(i, i + s) + i += s + x + } else { + Iterator.empty.next() + } + } + } + } + + override def sorted[B >: Int](implicit 
ord: Ordering[B]): IndexedSeq[Int] = + if (ord eq Ordering.Int) { + if (step > 0) { + this + } else { + reverse + } + } else { + super.sorted(ord) + } +} + +/** + * Companion object for ranges. + * @define Coll `Range` + * @define coll range + */ +object Range { + + /** Counts the number of range elements. + * precondition: step != 0 + * If the size of the range exceeds Int.MaxValue, the + * result will be negative. + */ + def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = { + if (step == 0) + throw new IllegalArgumentException("step cannot be 0.") + + val isEmpty = + if (start == end) !isInclusive + else if (start < end) step < 0 + else step > 0 + + if (isEmpty) 0 + else { + // Counts with Longs so we can recognize too-large ranges. + val gap: Long = end.toLong - start.toLong + val jumps: Long = gap / step + // Whether the size of this range is one larger than the + // number of full-sized jumps. + val hasStub = isInclusive || (gap % step != 0) + val result: Long = jumps + ( if (hasStub) 1 else 0 ) + + if (result > scala.Int.MaxValue) -1 + else result.toInt + } + } + def count(start: Int, end: Int, step: Int): Int = + count(start, end, step, isInclusive = false) + + /** Make a range from `start` until `end` (exclusive) with given step value. + * @note step != 0 + */ + def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step) + + /** Make a range from `start` until `end` (exclusive) with step value 1. + */ + def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1) + + /** Make an inclusive range from `start` to `end` with given step value. + * @note step != 0 + */ + def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step) + + /** Make an inclusive range from `start` to `end` with step value 1. 
+ */ + def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1) + + @SerialVersionUID(3L) + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = true + } + + @SerialVersionUID(3L) + final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + def isInclusive: Boolean = false + } + + // BigInt and Long are straightforward generic ranges. + object BigInt { + def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step) + def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step) + } + + object Long { + def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step) + def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step) + } + + // BigDecimal uses an alternative implementation of Numeric in which + // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for + // details. The intention is for it to throw an exception anytime + // imprecision or surprises might result from anything, although this may + // not yet be fully implemented. + object BigDecimal { + implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral + + def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] = + NumericRange(start, end, step) + def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] = + NumericRange.inclusive(start, end, step) + } + + // As there is no appealing default step size for not-really-integral ranges, + // we offer a partially constructed object. 
+ class Partial[T, U](private val f: T => U) extends AnyVal { + def by(x: T): U = f(x) + override def toString = "Range requires step" + } + + // Illustrating genericity with Int Range, which should have the same behavior + // as the original Range class. However we leave the original Range + // indefinitely, for performance and because the compiler seems to bootstrap + // off it and won't do so with our parameterized version without modifications. + object Int { + def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step) + def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step) + } + + private def emptyRangeError(what: String): Throwable = + new NoSuchElementException(what + " on empty Range") +} + +/** + * @param lastElement The last element included in the Range + * @param initiallyEmpty Whether the Range was initially empty or not + */ +@SerialVersionUID(3L) +private class RangeIterator( + start: Int, + step: Int, + lastElement: Int, + initiallyEmpty: Boolean +) extends AbstractIterator[Int] with Serializable { + private[this] var _hasNext: Boolean = !initiallyEmpty + private[this] var _next: Int = start + override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0 + def hasNext: Boolean = _hasNext + @throws[NoSuchElementException] + def next(): Int = { + if (!_hasNext) Iterator.empty.next() + val value = _next + _hasNext = value != lastElement + _next = value + step + value + } + + override def drop(n: Int): Iterator[Int] = { + if (n > 0) { + val longPos = _next.toLong + step * n + if (step > 0) { + _next = Math.min(lastElement, longPos).toInt + _hasNext = longPos <= lastElement + } + else if (step < 0) { + _next = Math.max(lastElement, longPos).toInt + _hasNext = longPos >= lastElement + } + } + this + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala 
b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala new file mode 100644 index 000000000000..2e7aa7b472ad --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala @@ -0,0 +1,1231 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.meta.{getter, setter} +import scala.annotation.tailrec +import scala.runtime.Statics.releaseFence + +/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. + * + * Implementation note: since efficiency is important for data structures this implementation + * uses `null` to represent empty trees. This also means pattern matching cannot + * easily be used. The API represented by the RedBlackTree object tries to hide these + * optimizations behind a reasonably clean API. 
+ */ +private[collection] object RedBlackTree { + + def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + + def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null + def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match { + case null => None + case tree => Some(tree.value) + } + + @tailrec + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree + } + private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) { + def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) tree + else if (tree.isMutable) { + val res = tree.mutableBlack.makeImmutable + releaseFence() + res + } else tree.black + } + /** Create a new balanced tree where `newLeft` replaces `tree.left`. + * tree and newLeft are never null */ + protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + //Note - unlike the immutable trees we can't consider tree.left eq newLeft + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.mutableBlack + val resultRight = tree.mutableBlackWithLeft(newLeft_right) + + newLeft.mutableWithLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left) + val 
resultRight = tree.mutableBlackWithLeft(newLeft_right_right) + + newLeft_right.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.mutableWithLeft(newLeft) + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. + * tree and newRight are never null */ + protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + //Note - unlike the immutable trees we can't consider tree.right eq newRight + //as the balance operations may mutate the same object + //but that check was mostly to avoid the object creation + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + + val resultLeft = tree.mutableBlackWithRight(newRight_left.left) + val resultRight = newRight.mutableBlackWithLeft(newRight_left.right) + + newRight_left.mutableWithLeftRight(resultLeft, resultRight) + + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + + val resultLeft = tree.mutableBlackWithRight(newRight_left) + val resultRight = newRight_right.mutableBlack + + newRight.mutableWithLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.mutableWithRight(newRight) + } + } + } + private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] = + if (tree eq null) { + mutableRedTree(k, (), null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree + } else { + val cmp = 
ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k)) + else tree + } + } + private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] { + protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] = + if (tree eq null) { + mutableRedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + tree.mutableWithV(v) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + mutableBalanceLeft(tree, mutableUpd(tree.left, k, v)) + else if (cmp > 0) + mutableBalanceRight(tree, mutableUpd(tree.right, k, v)) + else tree.mutableWithV(v) + } + } + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite)) + def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + case (Some(from), Some(until)) => this.range(tree, from, until) + case (Some(from), None) => this.from(tree, from) + case (None, Some(until)) => this.until(tree, until) + case (None, None) => tree + } + def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + + def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: 
Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.left ne null) result = result.left + result + } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty tree") + var result = tree + while (result.right ne null) result = result.right + result + } + + def tail[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _tail(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tl = tree.left + if (tl eq null) tree.right + else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right) + else tree.redWithLeft(_tail(tree.left)) + } + blacken(_tail(tree)) + } + + def init[A, B](tree: Tree[A, B]): Tree[A, B] = { + def _init(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) throw new NoSuchElementException("empty tree") + else { + val tr = tree.right + if (tr eq null) tree.left + else if (tr.isBlack) balRight(tree, tree.left, _init(tr)) + else tree.redWithRight(_init(tr)) + } + blacken(_init(tree)) + } + + /** + * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node. + */ + def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp == 0) tree + else if (cmp < 0) { + val l = minAfter(tree.left, x) + if (l != null) l else tree + } else minAfter(tree.right, x) + } + + /** + * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node. 
+ */ + def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp <= 0) maxBefore(tree.left, x) + else { + val r = maxBefore(tree.right, x) + if (r != null) r else tree + } + } + + def foreach[A,B,U](tree:Tree[A,B], f:((A,B)) => U):Unit = if (tree ne null) _foreach(tree,f) + + def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b)) + } + def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b)) + } + def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = { + if (a eq b) true + else if (a eq null) false + else if (b eq null) false + else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b)) + } + + private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = { + if (tree.left ne null) _foreach(tree.left, f) + f((tree.key, tree.value)) + if (tree.right ne null) _foreach(tree.right, f) + } + + def foreachKey[A, U](tree:Tree[A,_], f: A => U):Unit = if (tree ne null) _foreachKey(tree,f) + + private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + if (tree.left ne null) _foreachKey(tree.left, f) + f((tree.key)) + if (tree.right ne null) _foreachKey(tree.right, f) + } + + def foreachEntry[A, B, U](tree:Tree[A,B], f: (A, B) => U):Unit = if (tree ne null) _foreachEntry(tree,f) + + private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + if (tree.left ne null) _foreachEntry(tree.left, f) + f(tree.key, tree.value) + if (tree.right ne null) _foreachEntry(tree.right, f) + } + + def iterator[A: 
Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start) + def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start) + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start) + + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = this.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + + def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack + + @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed + @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack + + private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + + // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth` + // for building subtrees. Use `blacken` instead when building top-level trees. + private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] = + if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t + + private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = { + val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount) + new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour) + } + + /** Create a new balanced tree where `newLeft` replaces `tree.left`. 
*/ + private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newLeft + // -- KV R | nl.L nl.KV nl.R + // | nl.R.L nl.R.KV nl.R.R + if (tree.left eq newLeft) tree + else { + if (newLeft.isRed) { + val newLeft_left = newLeft.left + val newLeft_right = newLeft.right + if (isRedTree(newLeft_left)) { + // RED + // black(nl.L) nl.KV black + // nl.R KV R + val resultLeft = newLeft_left.black + val resultRight = tree.blackWithLeft(newLeft_right) + + newLeft.withLeftRight(resultLeft, resultRight) + } else if (isRedTree(newLeft_right)) { + // RED + // black nl.R.KV black + // nl.L nl.KV nl.R.L nl.R.R KV R + val newLeft_right_right = newLeft_right.right + + val resultLeft = newLeft.blackWithRight(newLeft_right.left) + val resultRight = tree.blackWithLeft(newLeft_right_right) + + newLeft_right.withLeftRight(resultLeft, resultRight) + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } else { + // tree + // newLeft KV R + tree.withLeft(newLeft) + } + } + } + /** Create a new balanced tree where `newRight` replaces `tree.right`. 
*/ + private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + // Parameter trees + // tree | newRight + // L KV -- | nr.L nr.KV nr.R + // | nr.L.L nr.L.KV nr.L.R + if (tree.right eq newRight) tree + else { + if (newRight.isRed) { + val newRight_left = newRight.left + if (isRedTree(newRight_left)) { + // RED + // black nr.L.KV black + // L KV nr.L.L nr.L.R nr.KV nr.R + val resultLeft = tree.blackWithRight(newRight_left.left) + val resultRight = newRight.blackWithLeft(newRight_left.right) + + newRight_left.withLeftRight(resultLeft, resultRight) + } else { + val newRight_right = newRight.right + if (isRedTree(newRight_right)) { + // RED + // black nr.KV black(nr.R) + // L KV nr.L + val resultLeft = tree.blackWithRight(newRight_left) + val resultRight = newRight_right.black + + newRight.withLeftRight(resultLeft, resultRight) + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } else { + // tree + // L KV newRight + tree.withRight(newRight) + } + } + } + + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) { + if (overwrite) + tree.withV(v) + else tree + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) + balanceLeft(tree, upd(tree.left, k, v, overwrite)) + else if (cmp > 0) + balanceRight(tree, upd(tree.right, k, v, overwrite)) + else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef])) + tree.withV(v) + else tree + } + private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else { + val rank = count(tree.left) + 1 + if (idx < rank) + balanceLeft(tree, updNth(tree.left, idx, k, v)) + else if (idx > rank) + balanceRight(tree, updNth(tree.right, idx - rank, k, v)) + else tree 
+ } + + private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) + val newLeft = doFrom(tree.left, from) + if (newLeft eq tree.left) tree + else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, tree.right) + } + private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(to, tree.key)) return doTo(tree.left, to) + val newRight = doTo(tree.right, to) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join (tree.left, tree.key, tree.value, newRight) + } + private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) + val newRight = doUntil(tree.right, until) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false) + else join(tree.left, tree.key, tree.value, newRight) + } + + private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until) + if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until) + val newLeft = doFrom(tree.left, from) + val newRight = doUntil(tree.right, until) + if ((newLeft eq tree.left) && (newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false) + else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false) + else join(newLeft, tree.key, tree.value, newRight) + } + + private[this] def doDrop[A, B](tree: 
Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) tree + else if(n >= tree.count) null + else { + val l = count(tree.left) + if(n > l) doDrop(tree.right, n-l-1) + else if(n == l) join(null, tree.key, tree.value, tree.right) + else join(doDrop(tree.left, n), tree.key, tree.value, tree.right) + } + + private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = + if((tree eq null) || (n <= 0)) null + else if(n >= tree.count) tree + else { + val l = count(tree.left) + if(n <= l) doTake(tree.left, n) + else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value)) + else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1)) + } + + private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = + if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null + else if((from <= 0) && (until >= tree.count)) tree + else { + val l = count(tree.left) + if(until <= l) doSlice(tree.left, from, until) + else if(from > l) doSlice(tree.right, from-l-1, until-l-1) + else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1)) + } + + /* + * Forcing direct fields access using the @`inline` annotation helps speed up + * various operations (especially smallest/greatest and update/delete). + * + * Unfortunately the direct field access is not guaranteed to work (but + * works on the current implementation of the Scala compiler). + * + * An alternative is to implement the these classes using plain old Java code... + * + * Mutability + * This implementation encodes both mutable and immutable trees. + * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations + * by maintaining a mutable tree during internal operations, e.g. 
a builder building a Tree, and the other bulk + * API such as filter or ++ + * + * Mutable trees are only used within the confines of this bulk operation and not shared + * Mutable trees may transition to become immutable by calling beforePublish + * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing) + * + * Immutable trees may only child nodes (left and right) which are immutable Trees, and as such the immutable + * trees the entire transitive subtree is immutable + * + * Colour, mutablity and size encoding + * The colour of the Tree, its mutablity and size are all encoded in the _count field + * The colour is encoded in the top bit (31) of the _count. This allows a mutable tree to change colour without + * additional allocation + * The mutable trees always have bits 0 .. 30 (inclusive) set to 0 + * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree + * + * Naming + * All of the methods that can yield a mutable result have "mutable" on their name, and generally there + * is another method similarly named with doesn't. This is to aid safety and to reduce the cognitive load when + * reviewing changes. e.g. + * def upd(...) will update an immutable Tree, producing an immutable Tree + * def mutableUpd(...) 
will update a mutable or immutable Tree and may return a mutable or immutable Tree + * a method that has mutable in its name may return a immutable tree if the operation can reuse the existing tree + * + */ + private[immutable] final class Tree[A, +B]( + @(`inline` @getter @setter) private var _key: A, + @(`inline` @getter @setter) private var _value: AnyRef, + @(`inline` @getter @setter) private var _left: Tree[A, _], + @(`inline` @getter @setter) private var _right: Tree[A, _], + @(`inline` @getter @setter) private var _count: Int) + { + @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0 + // read only APIs + @`inline` private[RedBlackTree] final def count = { + //devTimeAssert((_count & 0x7FFFFFFF) != 0) + _count & colourMask + } + //retain the colour, and mark as mutable + @`inline` private def mutableRetainingColour = _count & colourBit + + //inlined here to avoid outer object null checks + @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + @`inline` private[immutable] final def key = _key + @`inline` private[immutable] final def value = _value.asInstanceOf[B] + @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]] + @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]] + //Note - only used in tests outside RedBlackTree + @`inline` private[immutable] final def isBlack = _count < 0 + //Note - only used in tests outside RedBlackTree + @`inline` private[immutable] final def isRed = _count >= 0 + + override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)" + + //mutable APIs + private[RedBlackTree] def makeImmutable: Tree[A, B] = { + def makeImmutableImpl() = { + if (isMutable) { + var size = 1 + if (_left ne null) { + _left.makeImmutable + size += _left.count + } + if (_right ne null) { + _right.makeImmutable + size += _right.count + } + _count |= size //retains colour + } + this + } + 
makeImmutableImpl() + this + } + + private[RedBlackTree] def mutableBlack: Tree[A, B] = { + if (isBlack) this + else if (isMutable) { + _count = initialBlackCount + this + } + else new Tree(_key, _value, _left, _right, initialBlackCount) + } +// private[RedBlackTree] def mutableRed: Tree[A, B] = { +// if (isRed) this +// else if (mutable) { +// _count = initialRedCount +// this +// } +// else new Tree(_key, _value, _left, _right, initialRedCount) +// } + + private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = { + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else if (isMutable) { + _value = newValue.asInstanceOf[AnyRef] + this + } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour) + } + + private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if (_left eq newLeft) this + else if (isMutable) { + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + if (_right eq newRight) this + else if (isMutable) { + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && (_right eq newRight)) this + else if (isMutable) { + _left = newLeft + _right = newRight + this + } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour) + } + private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + if ((_left eq newLeft) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _left = newLeft + this + } else new Tree(_key, _value, newLeft, _right, initialBlackCount) + } + private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + 
if ((_right eq newRight) && isBlack) this + else if (isMutable) { + _count = initialBlackCount + _right = newRight + this + } else new Tree(_key, _value, _left, newRight, initialBlackCount) + } + + private[RedBlackTree] def black: Tree[A, B] = { + //assertNotMutable(this) + if (isBlack) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def red: Tree[A, B] = { + //assertNotMutable(this) + if (isRed) this + else new Tree(_key, _value, _left, _right, _count ^ colourBit) + } + private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) && + (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this + else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = { + //assertNotMutable(this) + if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this + else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count) + } + + private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if (newLeft eq _left) this + else { + val size = sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size) + } + } + private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newRight) + if (newRight eq _right) this + else { + val size = sizeOf(_left) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size) + } + } + private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if ((newLeft eq _left) && isBlack) this + else { + val size = 
sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size) + } + } + private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if ((newLeft eq _left) && isRed) this + else { + val size = sizeOf(newLeft) + sizeOf(_right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size) + } + } + private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newRight) + if ((newRight eq _right) && isBlack) this + else { + val size = sizeOf(_left) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size) + } + } + private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + if ((newRight eq _right) && isRed) this + else { + val size = sizeOf(_left) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size) + } + } + private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + //assertNotMutable(newRight) + if ((newLeft eq _left) && (newRight eq _right)) this + else { + val size = sizeOf(newLeft) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size) + } + } + private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + //assertNotMutable(newRight) + if ((newLeft eq _left) && (newRight eq _right) && isRed) this + else { + val size = sizeOf(newLeft) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size) + } + } 
+ private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = { + //assertNotMutable(this) + //assertNotMutable(newLeft) + //assertNotMutable(newRight) + if ((newLeft eq _left) && (newRight eq _right) && isBlack) this + else { + val size = sizeOf(newLeft) + sizeOf(newRight) + 1 + new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size) + } + } + } + //see #Tree docs "Colour, mutablity and size encoding" + //we make these final vals because the optimiser inlines them, without reference to the enclosing module + private[RedBlackTree] final val colourBit = 0x80000000 + //really its ~colourBit but that doesnt get inlined + private[RedBlackTree] final val colourMask = colourBit - 1 + private[RedBlackTree] final val initialBlackCount = colourBit + private[RedBlackTree] final val initialRedCount = 0 + + @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount) + @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount) + + /** create a new immutable red tree. 
+ * left and right may be null + */ + private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size) + } + private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = { + //assertNotMutable(left) + //assertNotMutable(right) + val size = sizeOf(left) + sizeOf(right) + 1 + new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size) + } + @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count + //immutable APIs + //assertions - uncomment decls and callers when changing functionality + // private def devTimeAssert(assertion: Boolean) = { + // //uncomment this during development of the functionality + // assert(assertion) + // } + // private def assertNotMutable(t:Tree[_,_]) = { + // devTimeAssert ((t eq null) || t.count > 0) + // } + private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + + override def hasNext: Boolean = lookahead ne null + + @throws[NoSuchElementException] + override def next(): R = { + val tree = lookahead + if(tree ne null) { + lookahead = findLeftMostOrPopOnEmpty(goRight(tree)) + nextResult(tree) + } else Iterator.empty.next() + } + + @tailrec + protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else if (tree.left eq null) tree + else findLeftMostOrPopOnEmpty(goLeft(tree)) + + @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = { + stackOfNexts(index) = tree + index += 1 + } + @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else { + index -= 1 + stackOfNexts(index) + } + + 
protected[this] val stackOfNexts = if (root eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * Although we don't store the deepest nodes in the path during iteration, + * we potentially do so in `startFrom`. + */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 + new Array[Tree[A, B]](maximumHeight) + } + private[this] var index = 0 + protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + + /** + * Find the leftmost subtree whose key is equal to the given key, or if no such thing, + * the leftmost subtree with the key that would be "next" after it according + * to the ordering. Along the way build up the iterator's path stack so that "next" + * functionality works. + */ + private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else { + @tailrec def find(tree: Tree[A, B]): Tree[A, B] = + if (tree eq null) popNext() + else find( + if (ordering.lteq(key, tree.key)) goLeft(tree) + else goRight(tree) + ) + find(root) + } + + @`inline` private[this] def goLeft(tree: Tree[A, B]) = { + pushNext(tree) + tree.left + } + + @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right + } + + private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) { + override def nextResult(tree: Tree[A, B]) = ??? 
+ + def sameKeys[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key) + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameValues[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + def sameEntries[X](that:EqualsIterator[A,X]): Boolean = { + var equal = true + while (equal && (this.lookahead ne null) && (that.lookahead ne null)) { + if (this.lookahead eq that.lookahead) { + this.lookahead = this.popNext() + that.lookahead = that.popNext() + } else { + equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) || + ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value + this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead)) + that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead)) + } + } + equal && (this.lookahead eq null) && (that.lookahead eq null) + } + } + private[this] class EntriesIterator[A: Ordering, 
B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.key + } + + private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) { + override def nextResult(tree: Tree[A, B]) = tree.value + } + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, Null] = size match { + case 0 => null + case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + BlackTree(x, null, left, right) + } + f(1, size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Tree[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + mkTree(level != maxUsedDepth || level == 1, k, v, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + BlackTree(k, v, left, right) + } + f(1, size) + } + + def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] = + if(t eq null) null + else { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + val l2 = transform(l, f) + val v2 = 
f(k, v) + val r2 = transform(r, f) + if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef]) + && (l2 eq l) + && (r2 eq r)) t.asInstanceOf[Tree[A, C]] + else mkTree(t.isBlack, k, v2, l2, r2) + } + + def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else { + def fk(t: Tree[A, B]): Tree[A, B] = { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + val l2 = if(l eq null) null else fk(l) + val keep = f(k, v) + val r2 = if(r eq null) null else fk(r) + if(!keep) join2(l2, r2) + else if((l2 eq l) && (r2 eq r)) t + else join(l2, k, v, r2) + } + blacken(fk(t)) + } + + private[this] val null2 = (null, null) + + def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) = if(t eq null) (null, null) else { + if (t eq null) null2 + else { + object partitioner { + var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk + def fk(t: Tree[A, B]): Unit = { + val k = t.key + val v = t.value + val l = t.left + val r = t.right + var l2k, l2d, r2k, r2d = null: Tree[A, B] + if (l ne null) { + fk(l) + l2k = tmpk + l2d = tmpd + } + val keep = p(k, v) + if (r ne null) { + fk(r) + r2k = tmpk + r2d = tmpd + } + val jk = + if (!keep) join2(l2k, r2k) + else if ((l2k eq l) && (r2k eq r)) t + else join(l2k, k, v, r2k) + val jd = + if (keep) join2(l2d, r2d) + else if ((l2d eq l) && (r2d eq r)) t + else join(l2d, k, v, r2d) + tmpk = jk + tmpd = jd + } + } + + partitioner.fk(t) + (blacken(partitioner.tmpk), blacken(partitioner.tmpd)) + } + } + + // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees + // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] + // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ + + private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if 
(tree eq null) null else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) { + val newLeft = del(tree.left, k) + if (newLeft eq tree.left) tree + else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right) + else tree.redWithLeft(newLeft) + } else if (cmp > 0) { + val newRight = del(tree.right, k) + if (newRight eq tree.right) tree + else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight) + else tree.redWithRight(newRight) + } else append(tree.left, tree.right) + } + + private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) { + if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black) + else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr)) + else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else tree.blackWithLeftRight(tl, tr) + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black) + else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right)) + else tree.blackWithLeftRight(tl, tr) + } else tree.blackWithLeftRight(tl, tr) + + private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr) + else if (isBlackTree(tr)) balance(tree, tl, tr.red) + else if (isRedTree(tr) && isBlackTree(tr.left)) + tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red)) + else sys.error("Defect: invariance violation") + + private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black) + else if (isBlackTree(tl)) balance(tree, tl.red, tr) + else if (isRedTree(tl) && isBlackTree(tl.right)) + 
tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr)) + else sys.error("Defect: invariance violation") + + /** `append` is similar to `join2` but requires that both subtrees have the same black height */ + private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = { + if (tl eq null) tr + else if (tr eq null) tl + else if (tl.isRed) { + if (tr.isRed) { + //tl is red, tr is red + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else tl.withRight(tr.withLeft(bc)) + } else { + //tl is red, tr is black + tl.withRight(append(tl.right, tr)) + } + } else { + if (tr.isBlack) { + //tl is black tr is black + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right)) + else balLeft(tl, tl.left, tr.withLeft(bc)) + } else { + //tl is black tr is red + tr.withLeft(append(tl, tr.left)) + } + } + } + + + // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf) + // We don't store the black height in the tree so we pass it down into the join methods and derive the black height + // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it. + // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference. 
+ + def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2)) + + def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2)) + + def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] = + blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]])) + + /** Compute the rank from a tree and its black height */ + @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = { + if(t eq null) 0 + else if(t.isBlack) 2*(bh-1) + else 2*bh-1 + } + + private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = { + val rtl = rank(tl, bhtl) + if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr) + else { + val tlBlack = isBlackTree(tl) + val bhtlr = if(tlBlack) bhtl-1 else bhtl + val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr) + if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right)) + RedTree(ttr.key, ttr.value, + BlackTree(tl.key, tl.value, tl.left, ttr.left), + ttr.right.black) + else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr) + } + } + + private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = { + val rtr = rank(tr, bhtr) + if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr) + else { + val trBlack = isBlackTree(tr) + val bhtrl = if(trBlack) bhtr-1 else bhtr + val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl) + if(trBlack && isRedTree(ttl) && isRedTree(ttl.left)) + RedTree(ttl.key, ttl.value, + ttl.left.black, + BlackTree(tr.key, tr.value, ttl.right, tr.right)) + else mkTree(trBlack, tr.key, tr.value, ttl, tr.right) + } + } + + private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = { + @tailrec def h(t: Tree[_, _], i: Int): Int = + if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i) + val bhtl = h(tl, 0) + val bhtr = h(tr, 0) + if(bhtl > bhtr) { + val tt = joinRight(tl, k, v, tr, 
bhtl, rank(tr, bhtr)) + if(isRedTree(tt) && isRedTree(tt.right)) tt.black + else tt + } else if(bhtr > bhtl) { + val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr) + if(isRedTree(tt) && isRedTree(tt.left)) tt.black + else tt + } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr) + } + + private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) = + if(t eq null) (null, null, null, k2) + else { + val cmp = ordering.compare(k2, t.key) + if(cmp == 0) (t.left, t, t.right, t.key) + else if(cmp < 0) { + val (ll, b, lr, k1) = split(t.left, k2) + (ll, b, join(lr, t.key, t.value, t.right), k1) + } else { + val (rl, b, rr, k1) = split(t.right, k2) + (join(t.left, t.key, t.value, rl), b, rr, k1) + } + } + + private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) = + if(t.right eq null) (t.left, t.key, t.value) + else { + val (tt, kk, vv) = splitLast(t.right) + (join(t.left, t.key, t.value, tt), kk, vv) + } + + private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = + if(tl eq null) tr + else if(tr eq null) tl + else { + val (ttl, k, v) = splitLast(tl) + join(ttl, k, v, tr) + } + + private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t1 eq t2)) t2 + else if(t2 eq null) t1 + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _union(l1, t2.left) + val tr = _union(r1, t2.right) + join(tl, k1, t2.value, tr) + } + + private[this] def _intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = + if((t1 eq null) || (t2 eq null)) null + else if (t1 eq t2) t1 + else { + val (l1, b, r1, k1) = split(t1, t2.key) + val tl = _intersect(l1, t2.left) + val tr = _intersect(r1, t2.right) + if(b ne null) join(tl, k1, t2.value, tr) + else join2(tl, tr) + } + + private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] 
= + if((t1 eq null) || (t2 eq null)) t1 + else if (t1 eq t2) null + else { + val (l1, _, r1, k1) = split(t1, t2.key) + val tl = _difference(l1, t2.left) + val tr = _difference(r1, t2.right) + join2(tl, tr) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index 5184cadaccae..925fd648c70c 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -14,8 +14,6 @@ package scala package collection package immutable -import language.experimental.captureChecking - trait Seq[+A] extends Iterable[A] with collection.Seq[A] with SeqOps[A, Seq, Seq[A]] @@ -30,7 +28,7 @@ trait Seq[+A] extends Iterable[A] * @define coll immutable sequence * @define Coll `immutable.Seq` */ -trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] +trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] /** * $factoryInfo @@ -39,7 +37,7 @@ trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C] */ @SerialVersionUID(3L) object Seq extends SeqFactory.Delegate[Seq](List) { - override def from[E](it: IterableOnce[E]^): Seq[E] = it match { + override def from[E](it: IterableOnce[E]): Seq[E] = it match { case s: Seq[E] => s case _ => super.from(it) } @@ -59,7 +57,7 @@ trait IndexedSeq[+A] extends Seq[A] } - override def sameElements[B >: A](o: IterableOnce[B]^): Boolean = o match { + override def sameElements[B >: A](o: IterableOnce[B]): Boolean = o match { case that: IndexedSeq[_] => (this eq that) || { val length = this.length @@ -112,7 +110,7 @@ object IndexedSeqDefaults { @SerialVersionUID(3L) object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) { - override def from[E](it: IterableOnce[E]^): IndexedSeq[E] = it match { + override def from[E](it: IterableOnce[E]): IndexedSeq[E] = it match { case is: IndexedSeq[E] => is case _ => super.from(it) } @@ -143,14 +141,14 @@ trait 
LinearSeq[+A] @SerialVersionUID(3L) object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) { - override def from[E](it: IterableOnce[E]^): LinearSeq[E] = it match { + override def from[E](it: IterableOnce[E]): LinearSeq[E] = it match { case ls: LinearSeq[E] => ls case _ => super.from(it) } } trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] - extends AnyRef with SeqOps[A, CC, C] + extends Any with SeqOps[A, CC, C] with collection.LinearSeqOps[A, CC, C] /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala new file mode 100644 index 000000000000..aca9e139165e --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala @@ -0,0 +1,276 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** A base trait for ordered, immutable maps. + * + * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs + * without regard to ordering. + * + * All behavior is defined in terms of the abstract methods in `SeqMap`. + * It is sufficient for concrete subclasses to implement those methods. + * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. 
+ * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ + +trait SeqMap[K, +V] + extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + + +object SeqMap extends MapFactory[SeqMap] { + def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): SeqMap[K, V] = + it match { + case sm: SeqMap[K, V] => sm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl + + @SerialVersionUID(3L) + private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { + override def size: Int = 0 + override def knownSize: Int = 0 + override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) + override def contains(key: Any) = false + def get(key: Any): Option[Nothing] = None + override def getOrElse [V1](key: Any, default: => V1): V1 = default + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value) + def removed(key: Any): SeqMap[Any, Nothing] = this + } + + @SerialVersionUID(3L) + private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 1 + override def knownSize: Int = 1 + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = + if (key == key1) Some(value1) else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 else default + def iterator = Iterator.single((key1, value1)) + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap1(key1, value) + else new SeqMap2(key1, value1, 
key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) SeqMap.empty else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + } + } + + @SerialVersionUID(3L) + private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 2 + override def knownSize: Int = 2 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else default + def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap2(key1, value, key2, value2) + else if (key == key2) new SeqMap2(key1, value1, key2, value) + else new SeqMap3(key1, value1, key2, value2, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap1(key2, value2) + else if (key == key2) new SeqMap1(key1, value1) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + } + } + + @SerialVersionUID(3L) + private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 3 + override def knownSize: Int = 3 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else throw new NoSuchElementException("key not found: " + key) + override def 
contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3) + else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value) + else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value) + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap2(key2, value2, key3, value3) + else if (key == key2) new SeqMap2(key1, value1, key3, value3) + else if (key == key3) new SeqMap2(key1, value1, key2, value2) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + } + } + + @SerialVersionUID(3L) + private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable { + override def size: Int = 4 + override def knownSize: Int = 4 + override def apply(key: K) = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = + if (key == key1) Some(value1) + else if (key == key2) Some(value2) + else if (key == key3) Some(value3) + 
else if (key == key4) Some(value4) + else None + override def getOrElse [V1 >: V](key: K, default: => V1): V1 = + if (key == key1) value1 + else if (key == key2) value2 + else if (key == key3) value3 + else if (key == key4) value4 + else default + def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator + def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] = + if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4) + else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4) + else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value) + else { + // Directly create the elements for performance reasons + val fields = Vector(key1, key2, key3, key4, key) + val underlying: Map[K, (Int, V1)] = + HashMap( + (key1, (0, value1)), + (key2, (1, value2)), + (key3, (2, value3)), + (key4, (3, value4)), + (key, (4, value)) + ) + new VectorMap(fields, underlying) + } + def removed(key: K): SeqMap[K, V] = + if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4) + else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4) + else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4) + else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3) + else this + override def foreach[U](f: ((K, V)) => U): Unit = { + f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) + } + override def foreachEntry[U](f: (K, V) => U): Unit = { + f(key1, value1) + f(key2, value2) + f(key3, value3) + f(key4, value4) + } + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, 
V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala new file mode 100644 index 000000000000..f07eb66991c8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Set.scala @@ -0,0 +1,398 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.collection.immutable.Set.Set4 +import scala.collection.mutable.{Builder, ReusableBuilder} + +/** Base trait for immutable set collections */ +trait Set[A] extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + override def iterableFactory: IterableFactory[Set] = Set +} + +/** Base trait for immutable set operations + * + * @define coll immutable set + * @define Coll `immutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] { + + /** Creates a new set with an additional element, unless the element is + * already present. + * + * @param elem the element to be added + * @return a new set that contains all elements of this set and that also + * contains `elem`. + */ + def incl(elem: A): C + + /** Alias for `incl` */ + override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated + + /** Creates a new set with a given element removed from this set. + * + * @param elem the element to be removed + * @return a new set that contains all elements of this set but that does not + * contain `elem`. + */ + def excl(elem: A): C + + /** Alias for `excl` */ + @`inline` final override def - (elem: A): C = excl(elem) + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + + /** Creates a new $coll from this $coll by removing all elements of another + * collection. + * + * @param that the collection containing the elements to remove. + * @return a new $coll with the given elements removed, omitting duplicates. 
+ */ + def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _) + + /** Alias for removedAll */ + override final def -- (that: IterableOnce[A]): C = removedAll(that) +} + +trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends SetOps[A, CC, C] + with collection.StrictOptimizedSetOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def concat(that: collection.IterableOnce[A]): C = { + var result: C = coll + val it = that.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +/** + * $factoryInfo + * @define coll immutable set + * @define Coll `immutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory[Set] { + + def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] + + def from[E](it: collection.IterableOnce[E]): Set[E] = + it match { + // We want `SortedSet` (and subclasses, such as `BitSet`) to + // rebuild themselves to avoid element type widening issues + case _: SortedSet[E] => (newBuilder[E] ++= it).result() + case _ if it.knownSize == 0 => empty[E] + case s: Set[E] => s + case _ => (newBuilder[E] ++= it).result() + } + + def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A] + + /** An optimized representation for immutable empty sets */ + @SerialVersionUID(3L) + private object EmptySet extends AbstractSet[Any] with Serializable { + override def size: Int = 0 + override def isEmpty = true + override def knownSize: Int = size + override def filter(pred: Any => Boolean): Set[Any] = this + override def filterNot(pred: Any => Boolean): Set[Any] = this + override def removedAll(that: IterableOnce[Any]): Set[Any] = this + override def diff(that: collection.Set[Any]): Set[Any] = this + override def subsetOf(that: collection.Set[Any]): Boolean = true + override def intersect(that: collection.Set[Any]): Set[Any] = this + override def view: View[Any] = View.empty + def contains(elem: Any): Boolean = false + def incl(elem: Any): Set[Any] = new Set1(elem) + def 
excl(elem: Any): Set[Any] = this + def iterator: Iterator[Any] = Iterator.empty + override def foreach[U](f: Any => U): Unit = () + } + private[collection] def emptyInstance: Set[Any] = EmptySet + + @SerialVersionUID(3L) + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { + private[this] var current = 0 + private[this] var remainder = n + override def knownSize: Int = remainder + def hasNext = remainder > 0 + def apply(i: Int): A + def next(): A = + if (hasNext) { + val r = apply(current) + current += 1 + remainder -= 1 + r + } else Iterator.empty.next() + + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + current += n + remainder = Math.max(0, remainder - n) + } + this + } + } + + /** An optimized representation for immutable sets of size 1 */ + @SerialVersionUID(3L) + final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 1 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set2(elem1, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) Set.empty + else this + def iterator: Iterator[A] = Iterator.single(elem1) + override def foreach[U](f: A => U): Unit = f(elem1) + override def exists(p: A => Boolean): Boolean = p(elem1) + override def forall(p: A => Boolean): Boolean = p(elem1) + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = + if (pred(elem1) != isFlipped) this else Set.empty + + override def find(p: A => Boolean): Option[A] = + if (p(elem1)) Some(elem1) + else None + override def head: A = elem1 + override def tail: Set[A] = Set.empty + } + + /** An optimized representation for immutable sets of size 2 */ + @SerialVersionUID(3L) + final class Set2[A] private[collection] (elem1: A, elem2: A) extends 
AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 2 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = elem == elem1 || elem == elem2 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set3(elem1, elem2, elem) + def excl(elem: A): Set[A] = + if (elem == elem1) new Set1(elem2) + else if (elem == elem2) new Set1(elem1) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set1(elem2) + } + + /** An optimized representation for immutable sets of size 3 */ + @SerialVersionUID(3L) + final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 3 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else new Set4(elem1, elem2, elem3, elem) + def excl(elem: A): Set[A] = + if 
(elem == elem1) new Set2(elem2, elem3) + else if (elem == elem2) new Set2(elem1, elem3) + else if (elem == elem3) new Set2(elem1, elem2) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => this + } + } + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set2(elem2, elem3) + } + + /** An optimized representation for immutable sets of size 4 */ + @SerialVersionUID(3L) + final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable { + override def size: Int = 4 + override def isEmpty = false + override def knownSize: Int = size + def contains(elem: A): Boolean = + elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4 + def incl(elem: A): Set[A] = + if (contains(elem)) this + else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem + def excl(elem: A): Set[A] = + if (elem == elem1) new Set3(elem2, 
elem3, elem4) + else if (elem == elem2) new Set3(elem1, elem3, elem4) + else if (elem == elem3) new Set3(elem1, elem2, elem4) + else if (elem == elem4) new Set3(elem1, elem2, elem3) + else this + def iterator: Iterator[A] = new SetNIterator[A](size) { + def apply(i: Int) = getElem(i) + } + private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 } + + override def foreach[U](f: A => U): Unit = { + f(elem1); f(elem2); f(elem3); f(elem4) + } + override def exists(p: A => Boolean): Boolean = { + p(elem1) || p(elem2) || p(elem3) || p(elem4) + } + override def forall(p: A => Boolean): Boolean = { + p(elem1) && p(elem2) && p(elem3) && p(elem4) + } + override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = { + var r1, r2, r3: A = null.asInstanceOf[A] + var n = 0 + if (pred(elem1) != isFlipped) { r1 = elem1; n += 1} + if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1} + if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1} + if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1} + + n match { + case 0 => Set.empty + case 1 => new Set1(r1) + case 2 => new Set2(r1, r2) + case 3 => new Set3(r1, r2, r3) + case 4 => this + } + } + + override def find(p: A => Boolean): Option[A] = { + if (p(elem1)) Some(elem1) + else if (p(elem2)) Some(elem2) + else if (p(elem3)) Some(elem3) + else if (p(elem4)) Some(elem4) + else None + } + override def head: A = elem1 + override def tail: Set[A] = new Set3(elem2, elem3, elem4) + + private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type = + builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4) + } +} + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] + +/** Builder for Set. + * $multipleResults + */ +private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { + private[this] var elems: Set[A] = Set.empty + private[this] var switchedToHashSetBuilder: Boolean = false + private[this] var hashSetBuilder: HashSetBuilder[A] = _ + + override def clear(): Unit = { + elems = Set.empty + if (hashSetBuilder != null) { + hashSetBuilder.clear() + } + switchedToHashSetBuilder = false + } + + override def result(): Set[A] = + if (switchedToHashSetBuilder) hashSetBuilder.result() else elems + + def addOne(elem: A) = { + if (switchedToHashSetBuilder) { + hashSetBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem)) { + () // do nothing + } else { + switchedToHashSetBuilder = true + if (hashSetBuilder == null) { + hashSetBuilder = new HashSetBuilder + } + elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder) + hashSetBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[A]): this.type = + if (switchedToHashSetBuilder) { + hashSetBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala new file mode 100644 index 000000000000..666d8c55bfb0 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -0,0 +1,177 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder + +/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys. + * + * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in + * sorted order, according to the map's [[scala.math.Ordering]]. + * + * @example {{{ + * import scala.collection.immutable.SortedMap + * + * // Make a SortedMap via the companion object factory + * val weekdays = SortedMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + */ +trait SortedMap[K, +V] + extends Map[K, V] + with collection.SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. 
+ * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d) +} + +trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self => + + protected def coll: C with CC[K, V] + + def unsorted: Map[K, V] + + override def keySet: SortedSet[K] = new ImmutableKeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet { + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = self.rangeImpl(from, until) + new map.ImmutableKeySortedSet + } + def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem) + def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem) + } + + // We override these methods to fix their return type (which would be `Map` otherwise) + def updated[V1 >: V](key: K, value: V1): CC[K, V1] + @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2) + override def updatedWith[V1 >: V](key: K)(remappingFunction: 
Option[V] => Option[V1]): CC[K, V1] = { + // Implementation has been copied from `MapOps` + val previousValue = this.get(key) + remappingFunction(previousValue) match { + case None => previousValue.fold(coll)(_ => this.removed(key).coll) + case Some(nextValue) => + if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll + else coll.updated(key, nextValue) + } + } + override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering) +} + +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with collection.StrictOptimizedSortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = { + var result: CC[K, V2] = coll + val it = xs.iterator + while (it.hasNext) result = result + it.next() + result + } +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match { + case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm + case _ => super.from(it) + } + + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { + + implicit def ordering: Ordering[K] = underlying.ordering + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, 
until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + + override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = + new WithDefault[K, V1](underlying.updated(key, value), defaultValue) + + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + new WithDefault( underlying.concat(xs) , defaultValue) + + override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala new file mode 100644 index 000000000000..303e5ea9658c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +/** Base trait for sorted sets */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SortedSetOps[A, CC, C] + with collection.StrictOptimizedSortedSetOps[A, CC, C] + with StrictOptimizedSetOps[A, Set, C] { +} + +/** + * $factoryInfo + * @define coll immutable sorted set + * @define Coll `immutable.SortedSet` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { + override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss + case _ => super.from(it) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Stream.scala b/tests/pos-special/stdlib/collection/immutable/Stream.scala new file mode 100644 index 000000000000..ae03641e97dd --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Stream.scala @@ -0,0 +1,568 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import java.io.{ObjectInputStream, ObjectOutputStream} +import java.lang.{StringBuilder => JStringBuilder} + +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.SerializeEnd +import scala.collection.mutable.{ArrayBuffer, StringBuilder} +import scala.language.implicitConversions +import Stream.cons + +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) +sealed abstract class Stream[+A] extends AbstractSeq[A] + with LinearSeq[A] + with LinearSeqOps[A, Stream, Stream[A]] + with IterableFactoryDefaults[A, Stream] + with Serializable { + def tail: Stream[A] + + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type + + override def iterableFactory: SeqFactory[Stream] = Stream + + override protected[this] def className: String = "Stream" + + /** Apply the given function `f` to each element of this linear sequence + * (while respecting the order of the elements). + * + * @param f The treatment to apply to each element. + * @note Overridden here as final to trigger tail-call optimization, which + * replaces 'this' with 'tail' at each iteration. This is absolutely + * necessary for allowing the GC to collect the underlying Stream as elements + * are consumed. + * @note This function will force the realization of the entire Stream + * unless the `f` throws an exception. 
+ */ + @tailrec + override final def foreach[U](f: A => U): Unit = { + if (!this.isEmpty) { + f(head) + tail.foreach(f) + } + } + + @tailrec + override final def find(p: A => Boolean): Option[A] = { + if(isEmpty) None + else if(p(head)) Some(head) + else tail.find(p) + } + + override def take(n: Int): Stream[A] = { + if (n <= 0 || isEmpty) Stream.empty + else if (n == 1) new Stream.Cons(head, Stream.empty) + else new Stream.Cons(head, tail.take(n - 1)) + } + + /** Stream specialization of foldLeft which allows GC to collect along the + * way. + * + * @tparam B The type of value being accumulated. + * @param z The initial value seeded into the function `op`. + * @param op The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `op`. + */ + @tailrec + override final def foldLeft[B](z: B)(op: (B, A) => B): B = { + if (this.isEmpty) z + else tail.foldLeft(op(z, head))(op) + } + + /** The stream resulting from the concatenation of this stream with the argument stream. + * @param rest The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. + */ + @deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0") + @inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest) + + protected[this] def writeReplace(): AnyRef = + if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this + + /** Prints elements of this stream one by one, separated by commas. */ + @deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0") + @inline def print(): Unit = Console.print(this.force.mkString(", ")) + + /** Prints elements of this stream one by one, separated by `sep`. + * @param sep The separator string printed between consecutive elements. 
+ */ + @deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0") + @inline def print(sep: String): Unit = Console.print(this.force.mkString(sep)) + + /** The stream resulting from the concatenation of this stream with the argument stream. + * + * @param suffix The collection that gets appended to this stream + * @return The stream containing elements of this stream and the iterable object. + */ + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix)) + + override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] = + if (isEmpty) z +: iterableFactory.empty + else cons(z, tail.scanLeft(op(z, head))(op)) + + /** Stream specialization of reduceLeft which allows GC to collect + * along the way. + * + * @tparam B The type of value being accumulated. + * @param f The operation to perform on successive elements of the `Stream`. + * @return The accumulated value from successive applications of `f`. 
+ */ + override final def reduceLeft[B >: A](f: (B, A) => B): B = { + if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") + else { + var reducedRes: B = this.head + var left: Stream[A] = this.tail + while (!left.isEmpty) { + reducedRes = f(reducedRes, left.head) + left = left.tail + } + reducedRes + } + } + + override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) + + override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false) + + override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true) + + private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { + // optimization: drop leading prefix of elems for which f returns false + // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise + var rest: Stream[A] = coll + while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail + // private utility func to avoid `this` on stack (would be needed for the lazy arg) + if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) + else iterableFactory.empty + } + + /** A `collection.WithFilter` which allows GC of the head of stream during processing */ + override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] = + Stream.withFilter(coll, p) + + override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll) + + override final def map[B](f: A => B): Stream[B] = + if (isEmpty) iterableFactory.empty + else cons(f(head), tail.map(f)) + + @tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] = + if(isEmpty) Stream.empty + else { + var newHead: B = null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Stream.collectedTail(newHead, this, pf) + else tail.collect(pf) + } + + @tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = + if(isEmpty) None + else { + var newHead: B 
= null.asInstanceOf[B] + val runWith = pf.runWith((b: B) => newHead = b) + if(runWith(head)) Some(newHead) + else tail.collectFirst(pf) + } + + // optimisations are not for speed, but for functionality + // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) + override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] = + if (isEmpty) iterableFactory.empty + else { + // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty + var nonEmptyPrefix: Stream[A] = coll + var prefix = iterableFactory.from(f(nonEmptyPrefix.head)) + while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { + nonEmptyPrefix = nonEmptyPrefix.tail + if(!nonEmptyPrefix.isEmpty) + prefix = iterableFactory.from(f(nonEmptyPrefix.head)) + } + + if (nonEmptyPrefix.isEmpty) iterableFactory.empty + else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f)) + } + + override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] = + if (this.isEmpty || that.isEmpty) iterableFactory.empty + else { + val thatIterable = that match { + case that: collection.Iterable[B] => that + case _ => LazyList.from(that) + } + cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail)) + } + + override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0)) + + protected def tailDefined: Boolean + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"`. + * + * @param sb the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. 
+ * @param end the ending string. + * @return the string builder `b` to which elements were appended. + */ + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + force + addStringNoForce(sb.underlying, start, sep, end) + sb + } + + private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = { + b.append(start) + if (nonEmpty) { + b.append(head) + var cursor = this + def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + if (tailDefined) { // If tailDefined, also !isEmpty + var scout = tail + if (cursor ne scout) { + cursor = scout + if (scout.tailDefined) { + scout = scout.tail + // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings + while ((cursor ne scout) && scout.tailDefined) { + appendCursorElement() + cursor = cursor.tail + scout = scout.tail + if (scout.tailDefined) scout = scout.tail + } + } + } + if (!scout.tailDefined) { // Not a cycle, scout hit an end + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + if (cursor.nonEmpty) { + appendCursorElement() + } + } + else { + // Cycle. + // If we have a prefix of length P followed by a cycle of length C, + // the scout will be at position (P%C) in the cycle when the cursor + // enters it at P. They'll then collide when the scout advances another + // C - (P%C) ahead of the cursor. + // If we run the scout P farther, then it will be at the start of + // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner + // starts at the beginning of the prefix, they'll collide exactly at + // the start of the loop. + var runner = this + var k = 0 + while (runner ne scout) { + runner = runner.tail + scout = scout.tail + k += 1 + } + // Now runner and scout are at the beginning of the cycle. Advance + // cursor, adding to string, until it hits; then we'll have covered + // everything once. 
If cursor is already at beginning, we'd better + // advance one first unless runner didn't go anywhere (in which case + // we've already looped once). + if ((cursor eq scout) && (k > 0)) { + appendCursorElement() + cursor = cursor.tail + } + while (cursor ne scout) { + appendCursorElement() + cursor = cursor.tail + } + } + } + if (cursor.nonEmpty) { + // Either undefined or cyclic; we can check with tailDefined + if (!cursor.tailDefined) b.append(sep).append("") + else b.append(sep).append("") + } + } + b.append(end) + } + + /** + * @return a string representation of this collection. Undefined elements are + * represented with `"_"`, an undefined tail is represented with `"<not computed>"`, + * and cycles are represented with `"<cycle>"` + * + * Examples: + * + * - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been + * evaluated ; + * - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements, + * the second one has been evaluated ; + * - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains + * a cycle at the fourth element. + */ + override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString + + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + override def hasDefiniteSize: Boolean = isEmpty || { + if (!tailDefined) false + else { + // Two-iterator trick (2x & 1x speed) for cycle detection. 
+ var those = this + var these = tail + while (those ne these) { + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (these.isEmpty) return true + if (!these.tailDefined) return false + these = these.tail + if (those eq these) return false + those = those.tail + } + false // Cycle detected + } + } +} + +@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") +@SerialVersionUID(3L) +object Stream extends SeqFactory[Stream] { + + /* !!! #11997 This `object cons` must be defined lexically *before* `class Cons` below. + * Otherwise it prevents Scala.js from building on Windows. + */ + /** An alternative way of building and matching Streams using Stream.cons(hd, tl). + */ + object cons { + /** A stream consisting of a given first element and remaining elements + * @param hd The first element of the result stream + * @param tl The remaining elements of the result stream + */ + def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl) + + /** Maps a stream to its head and tail */ + def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) + } + + //@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling + object Empty extends Stream[Nothing] { + override def isEmpty: Boolean = true + override def head: Nothing = throw new NoSuchElementException("head of empty stream") + override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream") + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. 
+ */ + def force: this.type = this + override def knownSize: Int = 0 + protected def tailDefined: Boolean = false + } + + @SerialVersionUID(3L) + final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { + override def isEmpty: Boolean = false + @volatile private[this] var tlVal: Stream[A] = _ + @volatile private[this] var tlGen = () => tl + protected def tailDefined: Boolean = tlGen eq null + override def tail: Stream[A] = { + if (!tailDefined) + synchronized { + if (!tailDefined) { + tlVal = tlGen() + tlGen = null + } + } + tlVal + } + + /** Forces evaluation of the whole `Stream` and returns it. + * + * @note Often we use `Stream`s to represent an infinite set or series. If + * that's the case for your particular `Stream` then this function will never + * return and will probably crash the VM with an `OutOfMemory` exception. + * This function will not hang on a finite cycle, however. + * + * @return The fully realized `Stream`. + */ + def force: this.type = { + // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) + var these, those: Stream[A] = this + if (!these.isEmpty) these = these.tail + while (those ne these) { + if (these.isEmpty) return this + these = these.tail + if (these.isEmpty) return this + these = these.tail + if (these eq those) return this + those = those.tail + } + this + } + + } + + implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l) + + final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal { + /** Construct a Stream consisting of a given first element followed by elements + * from another Stream. + */ + def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l()) + /** Construct a Stream consisting of the concatenation of the given Stream and + * another Stream. 
+ */ + def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l() + } + + object #:: { + def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = + if (s.nonEmpty) Some((s.head, s.tail)) else None + } + + def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match { + case coll: Stream[A] => coll + case _ => fromIterator(coll.iterator) + } + + /** + * @return A `Stream[A]` that gets its elements from the given `Iterator`. + * + * @param it Source iterator + * @tparam A type of elements + */ + // Note that the resulting `Stream` will be effectively iterable more than once because + // `Stream` memoizes its elements + def fromIterator[A](it: Iterator[A]): Stream[A] = + if (it.hasNext) { + new Stream.Cons(it.next(), fromIterator(it)) + } else Stream.Empty + + def empty[A]: Stream[A] = Empty + + override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array)) + + private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] = + new WithFilter[A](l, p) + + private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { + private[this] var s = l // set to null to allow GC after filtered + private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter + def map[B](f: A => B): Stream[B] = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) + def foreach[U](f: A => U): Unit = filtered.foreach(f) + def withFilter(q: A => Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q) + } + + /** An infinite Stream that repeatedly applies a given function to a start value. 
+ * + * @param start the start value of the Stream + * @param f the function that's repeatedly applied + * @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A)(f: A => A): Stream[A] = { + cons(start, iterate(f(start))(f)) + } + + /** + * Create an infinite Stream starting at `start` and incrementing by + * step `step`. + * + * @param start the start value of the Stream + * @param step the increment value of the Stream + * @return the Stream starting at value `start`. + */ + def from(start: Int, step: Int): Stream[Int] = + cons(start, from(start + step, step)) + + /** + * Create an infinite Stream starting at `start` and incrementing by `1`. + * + * @param start the start value of the Stream + * @return the Stream starting at value `start`. + */ + def from(start: Int): Stream[Int] = from(start, 1) + + /** + * Create an infinite Stream containing the given element expression (which + * is computed for each occurrence). + * + * @param elem the element composing the resulting Stream + * @return the Stream containing an infinite number of elem + */ + def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) + + + private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = { + cons(stream.head, stream.tail.filterImpl(p, isFlipped)) + } + + private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = { + cons(head, stream.tail.collect(pf)) + } + + /** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells. + * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses + * standard Java serialization to store the complete structure of unevaluated thunks. 
This allows the serialization + * of long evaluated streams without exhausting the stack through recursive serialization of cons cells. + */ + @SerialVersionUID(3L) + class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable { + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + var these = coll + while(these.nonEmpty && these.tailDefined) { + out.writeObject(these.head) + these = these.tail + } + out.writeObject(SerializeEnd) + out.writeObject(these) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + val init = new ArrayBuffer[A] + var initRead = false + while (!initRead) in.readObject match { + case SerializeEnd => initRead = true + case a => init += a.asInstanceOf[A] + } + val tail = in.readObject().asInstanceOf[Stream[A]] + coll = (init ++: tail) + } + + protected[this] def readResolve(): Any = coll + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala new file mode 100644 index 000000000000..db5192edc36c --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +/** + * Trait that overrides operations to take advantage of strict builders. 
+ */ +trait StrictOptimizedSeqOps[+A, +CC[_], +C] + extends Any + with SeqOps[A, CC, C] + with collection.StrictOptimizedSeqOps[A, CC, C] + with StrictOptimizedIterableOps[A, CC, C] { + + override def distinctBy[B](f: A => B): C = { + if (lengthCompare(1) <= 0) coll + else { + val builder = newSpecificBuilder + val seen = mutable.HashSet.empty[B] + val it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next() + if (seen.add(f(next))) builder += next else different = true + } + if (different) builder.result() else coll + } + } + + override def updated[B >: A](index: Int, elem: B): CC[B] = { + if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})") + val b = iterableFactory.newBuilder[B] + if (knownSize >= 0) { + b.sizeHint(size) + } + var i = 0 + val it = iterator + while (i < index && it.hasNext) { + b += it.next() + i += 1 + } + if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})") + b += elem + it.next() + while (it.hasNext) b += it.next() + b.result() + } + + override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = { + val b = iterableFactory.newBuilder[B] + var i = 0 + val it = iterator + while (i < from && it.hasNext) { + b += it.next() + i += 1 + } + b ++= other + i = replaced + while (i > 0 && it.hasNext) { + it.next() + i -= 1 + } + while (it.hasNext) b += it.next() + b.result() + } + + override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord) + +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala new file mode 100644 index 000000000000..a51c7b9e7bf6 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala @@ -0,0 +1,370 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.{RedBlackTree => RB} +import scala.collection.mutable.ReusableBuilder +import scala.runtime.AbstractFunction2 + +/** An immutable SortedMap whose values are stored in a red-black tree. + * + * This class is optimal when range queries will be performed, + * or when traversal in order of an ordering is desired. + * If you only need key lookups, and don't care in which order key-values + * are traversed in, consider using * [[scala.collection.immutable.HashMap]], + * which will generally have better performance. If you need insertion order, + * consider a * [[scala.collection.immutable.SeqMap]], which does not need to + * have an ordering supplied. 
+ * + * @example {{{ + * import scala.collection.immutable.TreeMap + * + * // Make a TreeMap via the companion object factory + * val weekdays = TreeMap( + * 2 -> "Monday", + * 3 -> "Tuesday", + * 4 -> "Wednesday", + * 5 -> "Thursday", + * 6 -> "Friday" + * ) + * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday) + * + * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday") + * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * + * val day3 = days.get(3) // Some("Tuesday") + * + * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday) + * + * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday) + * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday) + * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday) + * }}} + * + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * @param ordering the implicit ordering used to compare objects of type `A`. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
+ * + * @define Coll immutable.TreeMap + * @define coll immutable tree map + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + def this()(implicit ordering: Ordering[K]) = this(null)(ordering) + private[immutable] def tree0: RB.Tree[K, V] = tree + + private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t) + + override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap + + def iterator: Iterator[(K, V)] = RB.iterator(tree) + + def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start)) + + override def keySet: TreeSet[K] = new TreeSet(tree)(ordering) + + def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start)) + + override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]]( + size, tree, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => 
DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V])) + } + s.asInstanceOf[S with EfficientSplit] + } + + def get(key: K): Option[V] = RB.get(tree, key) + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val resultOrNull = RB.lookup(tree, key) + if (resultOrNull eq null) default + else resultOrNull.value + } + + def removed(key: K): TreeMap[K,V] = + newMapOrSelf(RB.delete(tree, key)) + + def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = + newMapOrSelf(RB.update(tree, key, value, overwrite = true)) + + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = + newMapOrSelf(that match { + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => + RB.union(tree, tm.tree) + case ls: LinearSeq[(K,V1)] => + if (ls.isEmpty) tree //to avoid the creation of the adder + else { + val adder = new Adder[V1] + adder.addAll(ls) + adder.finalTree + } + case _ => + val adder = new Adder[V1] + val it = that.iterator + while (it.hasNext) { + adder.apply(it.next()) + } + adder.finalTree + }) + + override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match { + 
case ts: TreeSet[K] if ordering == ts.ordering => + newMapOrSelf(RB.difference(tree, ts.tree)) + case _ => super.removedAll(keys) + } + + /** A new TreeMap with the entry added is returned, + * assuming that key is not in the TreeMap. + * + * @tparam V1 type of the values of the new bindings, a supertype of `V` + * @param key the key to be inserted + * @param value the value to be associated with `key` + * @return a new $coll with the inserted binding, if it wasn't present in the map + */ + @deprecated("Use `updated` instead", "2.13.0") + def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = { + assert(!RB.contains(tree, key)) + updated(key, value) + } + + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until)) + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match { + case null => Option.empty + case x => Some((x.key, x.value)) + } + + override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until)) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + override def size: Int = RB.count(tree) + override def knownSize: Int = size + + override def isEmpty = size == 0 + + override def firstKey: K = RB.smallest(tree).key + + override def lastKey: K = RB.greatest(tree).key + + override def head: (K, V) = { + val smallest = RB.smallest(tree) + (smallest.key, smallest.value) + } + + override def last: (K, V) = { + val greatest = RB.greatest(tree) + (greatest.key, greatest.value) + } + + override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree)) + + override def init: TreeMap[K, V] = new TreeMap(RB.init(tree)) + + override def drop(n: Int): TreeMap[K, V] = { + if (n <= 0) this + else if (n >= size) empty + 
else new TreeMap(RB.drop(tree, n)) + } + + override def take(n: Int): TreeMap[K, V] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeMap(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeMap(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0)) + + override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: ((K, V)) => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + + override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p)) + + override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p)) + + override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p)) + + override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] = + newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v)))) + + override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = { + val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v))) + (newMapOrSelf(l), newMapOrSelf(r)) + } + + override def transform[W](f: (K, V) => W): TreeMap[K, W] = { + val t2 = RB.transform[K, V, W](tree, f) + if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]] + else new TreeMap(t2) + } + + private final class Adder[B1 >: V] + extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { + private var currentMutableTree: RB.Tree[K,B1] = tree0 + def finalTree = beforePublish(currentMutableTree) + override def apply(kv: (K, B1)): Unit = { + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + } + @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = { + if (!ls.isEmpty) { + val kv = ls.head + currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) + addAll(ls.tail) + 
} + } + } + override def equals(obj: Any): Boolean = obj match { + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeMap" +} + +/** $factoryInfo + * @define Coll immutable.TreeMap + * @define coll immutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() + + def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] = + it match { + case tm: TreeMap[K, V] if ordering == tm.ordering => tm + case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => + new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size)) + case _ => + var t: RB.Tree[K, V] = null + val i = it.iterator + while (i.hasNext) { + val (k, v) = i.next() + t = RB.update(t, k, v, overwrite = true) + } + new TreeMap[K, V](t) + } + + def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V] + + private class TreeMapBuilder[K, V](implicit ordering: Ordering[K]) + extends RB.MapHelper[K, V] + with ReusableBuilder[(K, V), TreeMap[K, V]] { + type Tree = RB.Tree[K, V] + private var tree:Tree = null + + def addOne(elem: (K, V)): this.type = { + tree = mutableUpd(tree, elem._1, elem._2) + this + } + private object adder extends AbstractFunction2[K, V, Unit] { + // we cache tree to avoid the outer access to tree + // in the hot path (apply) + private[this] var accumulator :Tree = null + def addForEach(hasForEach: collection.Map[K, V]): Unit = { + accumulator = tree + hasForEach.foreachEntry(this) + tree = accumulator + // be friendly to GC + accumulator = null + } + + override def apply(key: K, value: V): Unit = { + accumulator = mutableUpd(accumulator, key, value) + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + xs match { + // TODO consider writing 
a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeMap[K, V] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0) + case that: collection.Map[K, V] => + //add avoiding creation of tuples + adder.addForEach(that) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree)) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala new file mode 100644 index 000000000000..80bafb1cf3be --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -0,0 +1,649 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec + +/** This class implements an immutable map that preserves order using + * a hash map for the key to value mapping to provide efficient lookup, + * and a tree for the ordering of the keys to provide efficient + * insertion/modification order traversal and destructuring. + * + * By default insertion order (`TreeSeqMap.OrderBy.Insertion`) + * is used, but modification order (`TreeSeqMap.OrderBy.Modification`) + * can be used instead if so specified at creation. + * + * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method + * can be used to switch to the specified ordering for the returned map. + * + * A key can be manually refreshed (i.e. 
placed at the end) via the + * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in + * use). + * + * Internally, an ordinal counter is increased for each insertion/modification + * and then the current ordinal is used as key in the tree map. After 2^32^ + * insertions/modifications the entire map is copied (thus resetting the ordinal + * counter). + * + * @tparam K the type of the keys contained in this map. + * @tparam V the type of the values associated with the keys in this map. + * @define coll immutable tree seq map + * @define Coll `immutable.TreeSeqMap` + */ +final class TreeSeqMap[K, +V] private ( + private val ordering: TreeSeqMap.Ordering[K], + private val mapping: TreeSeqMap.Mapping[K, V], + private val ordinal: Int, + val orderedBy: TreeSeqMap.OrderBy) + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]] + with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]] + with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] { + + import TreeSeqMap._ + + override protected[this] def className: String = "TreeSeqMap" + + override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap + + override val size = mapping.size + + override def knownSize: Int = size + + override def isEmpty = size == 0 + + /* + // This should have been overridden in 2.13.0 but wasn't so it will have to wait since it is not forwards compatible + // Now handled in inherited method from scala.collection.MapFactoryDefaults instead. 
+ override def empty = TreeSeqMap.empty[K, V](orderedBy) + */ + + def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == this.orderedBy) this + else if (isEmpty) TreeSeqMap.empty(orderBy) + else new TreeSeqMap(ordering, mapping, ordinal, orderBy) + } + + def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = { + mapping.get(key) match { + case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) => + // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates. + TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value) + case Some((o, _)) if orderedBy == OrderBy.Insertion => + new TreeSeqMap( + ordering.include(o, key), + mapping.updated[(Int, V1)](key, (o, value)), + ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal. + orderedBy) + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + case None => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.append(o1, key), + mapping.updated[(Int, V1)](key, (o1, value)), + o1, + orderedBy) + } + } + + def removed(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + new TreeSeqMap( + ordering.exclude(o), + mapping.removed(key), + ordinal, + orderedBy) + case None => + this + } + } + + def refresh(key: K): TreeSeqMap[K, V] = { + mapping.get(key) match { + case Some((o, _)) => + val o1 = increment(ordinal) + new TreeSeqMap( + ordering.exclude(o).append(o1, key), + mapping, + o1, + orderedBy) + case None => + this + } + } + + def get(key: K): Option[V] = mapping.get(key).map(value) + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): (K, V) = binding(iter.next()) + } + + override def keysIterator: Iterator[K] = 
new AbstractIterator[K] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): K = iter.next() + } + + override def valuesIterator: Iterator[V] = new AbstractIterator[V] { + private[this] val iter = ordering.iterator + + override def hasNext: Boolean = iter.hasNext + + override def next(): V = value(binding(iter.next())) + } + + override def contains(key: K): Boolean = mapping.contains(key) + + override def head: (K, V) = binding(ordering.head) + + override def headOption = ordering.headOption.map(binding) + + override def last: (K, V) = binding(ordering.last) + + override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding) + + override def tail: TreeSeqMap[K, V] = { + val (head, tail) = ordering.headTail + new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy) + } + + override def init: TreeSeqMap[K, V] = { + val (init, last) = ordering.initLast + new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy) + } + + override def slice(from: Int, until: Int): TreeSeqMap[K, V] = { + val sz = size + if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy) + else { + val sz = size + val f = if (from >= 0) from else 0 + val u = if (until <= sz) until else sz + val l = u - f + if (l <= 0) TreeSeqMap.empty[K, V](orderedBy) + else if (l > sz / 2) { + // Remove front and rear incrementally if majority of elements are to be kept + val (front, rest) = ordering.splitAt(f) + val (ong, rear) = rest.splitAt(l) + var mng = this.mapping + val frontIter = front.iterator + while (frontIter.hasNext) { + mng = mng - frontIter.next() + } + val rearIter = rear.iterator + while (rearIter.hasNext) { + mng = mng - rearIter.next() + } + new TreeSeqMap(ong, mng, ordinal, orderedBy) + } else { + // Populate with builder otherwise + val bdr = newBuilder[K, V](orderedBy) + val iter = ordering.iterator + var i = 0 + while (i < f) { + iter.next() + i += 1 + } + while (i < u) { + val k = iter.next() + 
bdr.addOne((k, mapping(k)._2)) + i += 1 + } + bdr.result() + } + } + } + + override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val (k2, v2) = f((k, v)) + bdr.addOne((k2, v2)) + } + bdr.result() + } + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + val jter = f((k, v)).iterator + while (jter.hasNext) { + val (k2, v2) = jter.next() + bdr.addOne((k2, v2)) + } + } + bdr.result() + } + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2, V2](orderedBy) + val iter = ordering.iterator + while (iter.hasNext) { + val k = iter.next() + val (_, v) = mapping(k) + pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v)) + } + bdr.result() + } + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): TreeSeqMap[K, V2] = { + var ong: Ordering[K] = ordering + var mng: Mapping[K, V2] = mapping + var ord = increment(ordinal) + val iter = suffix.iterator + while (iter.hasNext) { + val (k, v2) = iter.next() + mng.get(k) match { + case Some((o, v)) => + if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2)) + else if (orderedBy == OrderBy.Modification) { + mng = mng.updated(k, (ord, v2)) + ong = ong.exclude(o).append(ord, k) + ord = increment(ord) + } + case None => + mng = mng.updated(k, (ord, v2)) + ong = ong.append(ord, k) + ord = increment(ord) + } + } + new TreeSeqMap[K, V2](ong, mng, ord, orderedBy) + } + + @`inline` private[this] def value(p: (_, V)) = p._2 + @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k) +} +object TreeSeqMap extends MapFactory[TreeSeqMap] { + sealed trait OrderBy + object OrderBy { 
+ case object Insertion extends OrderBy + case object Modification extends OrderBy + } + + private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion) + private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification) + val Empty = EmptyByInsertion + def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion) + def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = { + if (orderBy == OrderBy.Modification) EmptyByModification + else EmptyByInsertion + }.asInstanceOf[TreeSeqMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): TreeSeqMap[K, V] = + it match { + case om: TreeSeqMap[K, V] => om + case _ => (newBuilder[K, V] ++= it).result() + } + + @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 + + def newBuilder[K, V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + + final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + private[this] val bdr = new MapBuilderImpl[K, (Int, V)] + private[this] var ong = Ordering.empty[K] + private[this] var ord = 0 + private[this] var aliased: TreeSeqMap[K, V] = _ + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + bdr.getOrElse(key, null) match { + case (o, v) => + if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value)) + else if (orderedBy == OrderBy.Modification) { + bdr.addOne(key, (ord, value)) + ong = ong.exclude(o).appendInPlace(ord, key) + ord = increment(ord) + } + case null => + bdr.addOne(key, (ord, value)) + ong = ong.appendInPlace(ord, key) + ord = increment(ord) + } + } + this + } + + override def 
clear(): Unit = { + ong = Ordering.empty + ord = 0 + bdr.clear() + aliased = null + } + + override def result(): TreeSeqMap[K, V] = { + if (aliased eq null) { + aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy) + } + aliased + } + } + + private type Mapping[K, +V] = Map[K, (Int, V)] + @annotation.unused + private val Mapping = Map + + /* The ordering implementation below is an adapted version of immutable.IntMap. */ + private[immutable] object Ordering { + import scala.collection.generic.BitOperations.Int._ + + @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}" + + def empty[T] : Ordering[T] = Zero + + def apply[T](elems: (Int, T)*): Ordering[T] = + elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2)) + + // Iterator over a non-empty Ordering. + final class Iterator[+V](it: Ordering[V]) { + // Basically this uses a simple stack to emulate conversion over the tree. However + // because we know that Ints are at least 32 bits we can have at most 32 Bins and + // one Tip sitting on the tree at any point. Therefore we know the maximum stack + // depth is 33 + private[this] var index = 0 + private[this] val buffer = new Array[AnyRef](33) + + private[this] def pop = { + index -= 1 + buffer(index).asInstanceOf[Ordering[V]] + } + + private[this] def push[V2 >: V](x: Ordering[V2]): Unit = { + buffer(index) = x.asInstanceOf[AnyRef] + index += 1 + } + + if (it != Zero) push(it) + + def hasNext = index != 0 + @tailrec + def next(): V = + pop match { + case Bin(_,_, Tip(_, v), right) => + push(right) + v + case Bin(_, _, left, right) => + push(right) + push(left) + next() + case Tip(_, v) => v + // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering + // and don't return an Ordering.Iterator for Ordering.Zero. 
+ case Zero => throw new IllegalStateException("empty subtree not allowed") + } + } + + object Iterator { + val Empty = new Iterator[Nothing](Ordering.empty[Nothing]) + def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]] + } + + case object Zero extends Ordering[Nothing] { + // Important! Without this equals method in place, an infinite + // loop from Map.equals => size => pattern-match-on-Nil => equals + // develops. Case objects and custom equality don't mix without + // careful handling. + override def equals(that : Any): Boolean = that match { + case _: this.type => true + case _: Ordering[_] => false // The only empty Orderings are eq Nil + case _ => super.equals(that) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø" + } + + final case class Tip[+T](ord: Int, value: T) extends Ordering[T] { + def withValue[S](s: S) = + if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]] + else Tip(ord, s) + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" + } + + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { + if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] + else Bin[S](prefix, mask, left, right) + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = { + sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n" + left.format(sb, subPrefix + "├── ", subPrefix + "│ ") + right.format(sb, subPrefix + "└── ", subPrefix + " ") + } + } + + private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j) + + private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = { + val m = branchMask(p1, p2) + val p = 
mask(p1, m) + if (zero(p1, m)) Bin(p, m, t1, t2) + else Bin(p, m, t2, t1) + } + + private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match { + case (l, Zero) => l + case (Zero, r) => r + case (l, r) => Bin(prefix, mask, l, r) + } + } + + sealed abstract class Ordering[+T] { + import Ordering._ + import scala.annotation.tailrec + import scala.collection.generic.BitOperations.Int._ + + override final def toString: String = format + final def format: String = { + val sb = new StringBuilder + format(sb, "", "") + sb.toString() + } + protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit + + @tailrec + final def head: T = this match { + case Zero => throw new NoSuchElementException("head of empty map") + case Tip(k, v) => v + case Bin(_, _, l, _) => l.head + } + + @tailrec + final def headOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, l, _) => l.headOption + } + + @tailrec + final def last: T = this match { + case Zero => throw new NoSuchElementException("last of empty map") + case Tip(_, v) => v + case Bin(_, _, _, r) => r.last + } + + @tailrec + final def lastOption: Option[T] = this match { + case Zero => None + case Tip(_, v) => Some(v) + case Bin(_, _, _, r) => r.lastOption + } + + @tailrec + final def ordinal: Int = this match { + case Zero => 0 + case Tip(o, _) => o + case Bin(_, _, _, r) => r.ordinal + } + + final def tail: Ordering[T] = this match { + case Zero => throw new NoSuchElementException("tail of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => bin(p, m, l.tail, r) + } + + final def headTail: (T, Ordering[T]) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (v, Zero) + case Bin(p, m, l, r) => + val (head, tail) = l.headTail + (head, bin(p, m, tail, r)) + } + + final def init: Ordering[T] = this match { + case Zero => throw new 
NoSuchElementException("init of empty map") + case Tip(_, _) => Zero + case Bin(p, m, l, r) => + bin(p, m, l, r.init) + } + + final def initLast: (Ordering[T], T) = this match { + case Zero => throw new NoSuchElementException("init of empty map") + case Tip(_, v) => (Zero, v) + case Bin(p, m, l, r) => + val (init, last) = r.initLast + (bin(p, m, l, init), last) + } + + final def iterator: Iterator[T] = this match { + case Zero => Iterator.empty + case _ => new Iterator(this) + } + + final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r) + else Bin(p, m, l, r.include(ordinal, value)) + } + + final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) => + if (ordinal == o) Tip(ordinal, value) + else join(ordinal, Tip(ordinal, value), o, this) + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this) + else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else Bin(p, m, l, r.append(ordinal, value)) + } + + @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value) + private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match { + case Zero => + Tip(ordinal, value) + case Tip(o, _) if o >= ordinal => + throw new IllegalArgumentException(s"Append called with ordinal out of range: $o is not greater than current max ordinal ${this.ordinal}") + case Tip(o, _) if parent == 
null => + join(ordinal, Tip(ordinal, value), o, this) + case Tip(o, _) => + parent.right = join(ordinal, Tip(ordinal, value), o, this) + parent + case b @ Bin(p, m, _, r) => + if (!hasMatch(ordinal, p, m)) { + val b2 = join(ordinal, Tip(ordinal, value), p, this) + if (parent != null) { + parent.right = b2 + parent + } else b2 + } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}") + else { + r.appendInPlace1(b, ordinal, value) + this + } + } + + final def exclude(ordinal: Int): Ordering[T] = this match { + case Zero => + Zero + case Tip(o, _) => + if (ordinal == o) Zero + else this + case Bin(p, m, l, r) => + if (!hasMatch(ordinal, p, m)) this + else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r) + else bin(p, m, l, r.exclude(ordinal)) + } + + final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { + var rear = Ordering.empty[T] + var i = n + (modifyOrRemove { (o, v) => + i -= 1 + if (i >= 0) Some(v) + else { + rear = rear.appendInPlace(o, v) + None + } + }, rear) + } + + /** + * A combined transform and filter function. Returns an `Ordering` such that + * for each `(key, value)` mapping in this map, if `f(key, value) == None` + * the map contains no mapping for key, and if `f(key, value) == Some(x)` the + * map contains `(key, x)`. + * + * @tparam S The type of the values in the resulting `LongMap`. + * @param f The transforming function. + * @return The modified map. 
+ */ + final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match { + case Zero => Zero + case Tip(key, value) => + f(key, value) match { + case None => Zero + case Some(value2) => + // hack to preserve sharing + if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]] + else Tip(key, value2) + } + case Bin(prefix, mask, left, right) => + val l = left.modifyOrRemove(f) + val r = right.modifyOrRemove(f) + if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]] + else bin(prefix, mask, l, r) + } + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala new file mode 100644 index 000000000000..f0be91b72acc --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -0,0 +1,296 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package immutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.ReusableBuilder +import scala.collection.immutable.{RedBlackTree => RB} +import scala.runtime.AbstractFunction1 + + +/** This class implements immutable sorted sets using a tree. + * + * @tparam A the type of the elements contained in this tree set + * @param ordering the implicit ordering used to compare objects of type `A` + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] + * section on `Red-Black Trees` for more information. 
+ * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) throw new NullPointerException("ordering must not be null") + + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) + + override def sortedIterableFactory = TreeSet + + private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t) + + override def size: Int = RB.count(tree) + + override def isEmpty = size == 0 + + override def head: A = RB.smallest(tree).key + + override def last: A = RB.greatest(tree).key + + override def tail: TreeSet[A] = new TreeSet(RB.tail(tree)) + + override def init: TreeSet[A] = new TreeSet(RB.init(tree)) + + override def min[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + head + } else { + super.min(ord) + } + } + + override def max[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + last + } else { + super.max(ord) + } + } + + override def drop(n: Int): TreeSet[A] = { + if (n <= 0) this + else if (n >= size) empty + else new TreeSet(RB.drop(tree, n)) + } + + override def take(n: Int): TreeSet[A] = { + if (n <= 0) empty + else if (n >= size) this + else new TreeSet(RB.take(tree, n)) + } + + override def slice(from: Int, until: Int): TreeSet[A] = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else new TreeSet(RB.slice(tree, from, until)) + } + + override def dropRight(n: Int): TreeSet[A] = take(size - 
math.max(n, 0)) + + override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0)) + + private[this] def countWhile(p: A => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next())) result += 1 + result + } + override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p)) + + override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p)) + + override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p)) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + override def minAfter(key: A): Option[A] = { + val v = RB.minAfter(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + override def maxBefore(key: A): Option[A] = { + val v = RB.maxBefore(tree, key) + if (v eq null) Option.empty else Some(v.key) + } + + def iterator: Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Tree[A, Any] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + /** Checks if this set contains element `elem`. + * + * @param elem the element to check for membership. + * @return true, iff `elem` is contained in this set. 
+ */ + def contains(elem: A): Boolean = RB.contains(tree, elem) + + override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until)) + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until)) + + /** Creates a new `TreeSet` with the entry added. + * + * @param elem a new element to add. + * @return a new $coll containing `elem` and all the elements of this $coll. + */ + def incl(elem: A): TreeSet[A] = + newSetOrSelf(RB.update(tree, elem, null, overwrite = false)) + + /** Creates a new `TreeSet` with the entry removed. + * + * @param elem a new element to add. + * @return a new $coll containing all the elements of this $coll except `elem`. + */ + def excl(elem: A): TreeSet[A] = + newSetOrSelf(RB.delete(tree, elem)) + + override def concat(that: collection.IterableOnce[A]): TreeSet[A] = { + val t = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + RB.union(tree, ts.tree) + case _ => + val it = that.iterator + var t = tree + while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false) + t + } + newSetOrSelf(t) + } + + override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + //TODO add an implementation of a mutable subtractor similar to TreeMap + //but at least this doesn't create a TreeSet for each iteration + object sub extends AbstractFunction1[A, Unit] { + var currentTree = tree + override def apply(k: A): Unit = { + currentTree = RB.delete(currentTree, k) + } + } + that.iterator.foreach(sub) + newSetOrSelf(sub.currentTree) + } + + override def intersect(that: collection.Set[A]): TreeSet[A] = that match { + case ts: TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.intersect(tree, ts.tree)) + case _ => + super.intersect(that) + } + + override def diff(that: collection.Set[A]): TreeSet[A] = that match { + case ts: 
TreeSet[A] if ordering == ts.ordering => + newSetOrSelf(RB.difference(tree, ts.tree)) + case _ => + super.diff(that) + } + + override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)})) + + override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = { + val (l, r) = RB.partitionEntries(tree, {(a:A, _: Any) => p(a)}) + (newSetOrSelf(l), newSetOrSelf(r)) + } + + override def equals(obj: Any): Boolean = obj match { + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) + case _ => super.equals(obj) + } + + override protected[this] def className = "TreeSet" +} + +/** + * $factoryInfo + * + * @define Coll `immutable.TreeSet` + * @define coll immutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] + + def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => ts + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (Ordering.Int isReverseOf ordering) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size) + // The cast is needed to compile with Dotty: + // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound + new TreeSet[E](tree) + case _ => + var t: RB.Tree[E, Null] = null + val i = it.iterator + while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A] + private class TreeSetBuilder[A](implicit ordering: Ordering[A]) + extends RB.SetHelper[A] + with 
ReusableBuilder[A, TreeSet[A]] { + type Tree = RB.Tree[A, Any] + private [this] var tree:RB.Tree[A, Any] = null + + override def addOne(elem: A): this.type = { + tree = mutableUpd(tree, elem) + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + xs match { + // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= + // for the moment we have to force immutability before the union + // which will waste some time and space + // calling `beforePublish` makes `tree` immutable + case ts: TreeSet[A] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree + else tree = RB.union(beforePublish(tree), ts.tree)(ordering) + case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering => + if (tree eq null) tree = ts.tree0 + else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) + case _ => + super.addAll(xs) + } + this + } + + override def clear(): Unit = { + tree = null + } + + override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering) + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala new file mode 100644 index 000000000000..aa3fac5acd69 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -0,0 +1,2474 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package immutable + +import java.lang.Math.{abs, max => mmax, min => mmin} +import java.util.Arrays.{copyOf, copyOfRange} +import java.util.{Arrays, Spliterator} + +import scala.annotation.switch +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.VectorInline._ +import scala.collection.immutable.VectorStatics._ +import scala.collection.mutable.ReusableBuilder + + +/** $factoryInfo + * @define Coll `Vector` + * @define coll vector + */ +@SerialVersionUID(3L) +object Vector extends StrictOptimizedSeqFactory[Vector] { + + def empty[A]: Vector[A] = Vector0 + + def from[E](it: collection.IterableOnce[E]): Vector[E] = + it match { + case v: Vector[E] => v + case _ => + val knownSize = it.knownSize + if (knownSize == 0) empty[E] + else if (knownSize > 0 && knownSize <= WIDTH) { + val a1: Arr1 = it match { + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => + as.unsafeArray.asInstanceOf[Arr1] + case it: Iterable[E] => + val a1 = new Arr1(knownSize) + it.copyToArray(a1.asInstanceOf[Array[Any]]) + a1 + case _ => + val a1 = new Arr1(knownSize) + it.iterator.copyToArray(a1.asInstanceOf[Array[Any]]) + a1.asInstanceOf[Arr1] + } + new Vector1[E](a1) + } else { + (newBuilder ++= it).result() + } + } + + def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A] + + /** Create a Vector with the same element at each index. + * + * Unlike `fill`, which takes a by-name argument for the value and can thereby + * compute different values for each index, this method guarantees that all + * elements are identical. This allows sparse allocation in O(log n) time and space. 
+ */ + private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = { + //TODO Make public; this method is private for now because it is not forward binary compatible + if(n <= 0) Vector0 + else { + val b = new VectorBuilder[A] + b.initSparse(n, elem) + b.result() + } + } + + private val defaultApplyPreferredMaxLength: Int = + try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength", + "250").toInt + catch { + case _: SecurityException => 250 + } + + private val emptyIterator = new NewVectorIterator(Vector0, 0, 0) +} + + +/** Vector is a general-purpose, immutable data structure. It provides random access and updates + * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)). + * Because vectors strike a good balance between fast random selections and fast random functional updates, + * they are currently the default implementation of immutable indexed sequences. + * + * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass + * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the + * top level). + * + * Tree balancing: + * - Only the first dimension of an array may have a size < WIDTH + * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up + * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1 + * - `prefix1` and `suffix1` are never empty + * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches + * the prefix). 
The level is increased/decreased when the affected side plus main data is already full/empty + * - All arrays are left-aligned and truncated + * + * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running + * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays. + */ +sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1) + extends AbstractSeq[A] + with IndexedSeq[A] + with IndexedSeqOps[A, Vector, Vector[A]] + with StrictOptimizedSeqOps[A, Vector, Vector[A]] + with IterableFactoryDefaults[A, Vector] + with DefaultSerializable { + + override def iterableFactory: SeqFactory[Vector] = Vector + + override final def length: Int = + if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0 + else prefix1.length + + override final def iterator: Iterator[A] = + if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator + else new NewVectorIterator(this, length, vectorSliceCount) + + override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = { + var i = 0 + val len = prefix1.length + while (i != len) { + if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) { + // each 1 bit indicates that index passes the filter. 
+ // all indices < i are also assumed to pass the filter + var bitmap = 0 + var j = i + 1 + while (j < len) { + if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) { + bitmap |= (1 << j) + } + j += 1 + } + val newLen = i + java.lang.Integer.bitCount(bitmap) + + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + var k = 0 + while(k < i) { + b.addOne(prefix1(k).asInstanceOf[A]) + k += 1 + } + k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + b.addOne(prefix1(k).asInstanceOf[A]) + i += 1 + } + k += 1 + } + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + return b.result() + } else { + if (newLen == 0) return Vector0 + val newData = new Array[AnyRef](newLen) + System.arraycopy(prefix1, 0, newData, 0, i) + var k = i + 1 + while (i != newLen) { + if (((1 << k) & bitmap) != 0) { + newData(i) = prefix1(k) + i += 1 + } + k += 1 + } + return new Vector1[A](newData) + } + } + i += 1 + } + if(this.isInstanceOf[BigVector[_]]) { + val b = new VectorBuilder[A] + b.initFrom(prefix1) + this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) } + b.result() + } else this + } + + // Dummy overrides to refine result types for binary compatibility: + override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) + override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) + override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): Vector[B] = { + val k = prefix.knownSize + if (k == 0) this + else if (k < 0) super.prependedAll(prefix) + else prependedAll0(prefix, k) + } + + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]): Vector[B] = { + val k = suffix.knownSize + if (k == 0) this + else if (k < 0) super.appendedAll(suffix) + else appendedAll0(suffix, k) + } + + protected[this] def prependedAll0[B >: A](prefix: 
collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = prefix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { + var v: Vector[B] = this + val it = IndexedSeq.from(prefix).reverseIterator + while (it.hasNext) v = it.next() +: v + v + } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) { + var v = prefix.asInstanceOf[Vector[B]] + val it = this.iterator + while (it.hasNext) v = v :+ it.next() + v + } else if (k < this.size - AlignToFaster) { + new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result() + } else super.prependedAll(prefix) + } + + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + // k >= 0, k = suffix.knownSize + val tinyAppendLimit = 4 + vectorSliceCount + if (k < tinyAppendLimit) { + var v: Vector[B] = this + suffix match { + case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) + case _ => suffix.iterator.foreach(x => v = v.appended(x)) + } + v + } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) { + var v = suffix.asInstanceOf[Vector[B]] + val ri = this.reverseIterator + while (ri.hasNext) v = v.prepended(ri.next()) + v + } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) { + val v = suffix.asInstanceOf[Vector[B]] + new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result() + } else new VectorBuilder[B].initFrom(this).addAll(suffix).result() + } + + override def className = "Vector" + + @inline override final def take(n: Int): Vector[A] = slice(0, n) + @inline override final def drop(n: Int): Vector[A] = slice(n, length) + @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length) + @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0)) + override def tail: Vector[A] = slice(1, length) + 
override def init: Vector[A] = slice(0, length-1) + + /** Like slice but parameters must be 0 <= lo < hi < length */ + protected[this] def slice0(lo: Int, hi: Int): Vector[A] + + /** Number of slices */ + protected[immutable] def vectorSliceCount: Int + /** Slice at index */ + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] + /** Length of all slices up to and including index */ + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + + override def toVector: Vector[A] = this + + override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + val s = shape.shape match { + case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]]) + case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]]) + case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]]) + case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]])) + } + s.asInstanceOf[S with EfficientSplit] + } + + protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})") + + override final def head: A = + if (prefix1.length == 0) throw new NoSuchElementException("empty.head") + else prefix1(0).asInstanceOf[A] + + override final def last: A = { + if(this.isInstanceOf[BigVector[_]]) { + val suffix = this.asInstanceOf[BigVector[_]].suffix1 + if(suffix.length == 0) throw new NoSuchElementException("empty.tail") + else suffix(suffix.length-1) + } else prefix1(prefix1.length-1) + }.asInstanceOf[A] + + override final def foreach[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 0 + 
while (i < c) { + foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f) + i += 1 + } + } + + // The following definitions are needed for binary compatibility with ParVector + private[collection] def startIndex: Int = 0 + private[collection] def endIndex: Int = length + private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit = + s.it = iterator.asInstanceOf[NewVectorIterator[B]] +} + + +/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */ +private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) { + + override final def slice(from: Int, until: Int): Vector[A] = { + val lo = mmax(from, 0) + val hi = mmin(until, length) + if (hi <= lo) Vector0 + else if (hi - lo == length) this + else slice0(lo, hi) + } +} + + +/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */ +private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) { + + protected[immutable] final def foreachRest[U](f: A => U): Unit = { + val c = vectorSliceCount + var i = 1 + while(i < c) { + foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f) + i += 1 + } + } +} + + +/** Empty vector */ +private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { + + def apply(index: Int): Nothing = throw ioob(index) + + override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index) + + override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem)) + + override def map[B](f: Nothing => B): Vector[B] = this + + override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail") + + override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init") + + protected[this] def slice0(lo: Int, hi: 
Int): Vector[Nothing] = this + + protected[immutable] def vectorSliceCount: Int = 0 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0 + + override def equals(o: Any): Boolean = { + if(this eq o.asInstanceOf[AnyRef]) true + else o match { + case that: Vector[_] => false + case o => super.equals(o) + } + } + + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + Vector.from(prefix) + + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B], k: Int): Vector[B] = + Vector.from(suffix) + + override protected[this] def ioob(index: Int): IndexOutOfBoundsException = + new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)") +} + +/** Flat ArraySeq-like structure */ +private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { + + @inline def apply(index: Int): A = { + if(index >= 0 && index < prefix1.length) + prefix1(index).asInstanceOf[A] + else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < prefix1.length) + new Vector1(copyUpdate(prefix1, index, elem)) + else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem)) + else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + val len1 = prefix1.length + if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1)) + else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1) + } + + override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = + new Vector1(copyOfRange(prefix1, lo, hi)) + + override def tail: Vector[A] = + if(prefix1.length == 1) Vector0 + else new 
Vector1(copyTail(prefix1)) + + override def init: Vector[A] = + if(prefix1.length == 1) Vector0 + else new Vector1(copyInit(prefix1)) + + protected[immutable] def vectorSliceCount: Int = 1 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case data1b => new Vector1(data1b) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val data1b = append1IfSpace(prefix1, suffix) + if(data1b ne null) new Vector1(data1b) + else super.appendedAll0(suffix, k) + } +} + + +/** 2-dimensional radix-balanced finger tree */ +private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val data2: Arr2, + _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + data2: Arr2 = data2, + suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector2(prefix1, len1, data2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1 + if(io >= 0) { + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) data2(i2)(i1) + else suffix1(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1) { + val io = index - len1 + val i2 = io >>> BITS + val i1 = io & MASK + if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } 
else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1) + else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1) + else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, data2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 3 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => data2 + case 2 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => length0 - suffix1.length + case 2 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = 
prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 3-dimensional radix-balanced finger tree */ +private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val data3: Arr3, + private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + data3: Arr3 = data3, + suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12 + if(io >= 0) { + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i3 < data3.length) data3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12) { + val io = index - len12 + val i3 = io >>> BITS2 + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, 
i1, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1) + else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1) + else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), + data3 = mapElems(3, data3, f), + suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, data3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) 
copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 5 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => data3 + case 3 => suffix2 + case 4 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len12 + data3.length*WIDTH2 + case 3 => length0 - suffix1.length + case 4 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 4-dimensional radix-balanced finger tree */ +private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val data4: Arr4, + private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = 
prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + data4: Arr4 = data4, + suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len123 + if(io >= 0) { + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i4 < data4.length) data4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len123) { + val io = index - len123 + val i4 = io >>> BITS3 + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) 
copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1) + else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1) + else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), + data4 = mapElems(4, data4, f), + suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + 
b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, data4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 7 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => data4 + case 4 => suffix3 + case 5 => suffix2 + case 6 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len123 + data4.length*WIDTH3 + case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2 + case 5 => length0 - suffix1.length + case 6 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 5-dimensional radix-balanced finger tree */ +private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, private[immutable] val len12: Int, + 
private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val data5: Arr5, + private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + data5: Arr5 = data5, + suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len1234 + if(io >= 0) { + val i5 = io >>> BITS4 + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len1234) { + val io = index - len1234 + val i5 = io >>> BITS4 + val i4 = (io >>> 
BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, 
(WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1) + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1) + else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1) + else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1) + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), + data5 = mapElems(5, data5, f), + suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, data5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + 
} + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 9 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => data5 + case 5 => suffix4 + case 6 => suffix3 + case 7 => suffix2 + case 8 => suffix1 + } + protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len1234 + data5.length*WIDTH4 + case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 7 => length0 - suffix1.length + case 8 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** 6-dimensional radix-balanced finger tree */ +private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int, + private[immutable] val prefix2: Arr2, 
private[immutable] val len12: Int, + private[immutable] val prefix3: Arr3, private[immutable] val len123: Int, + private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int, + private[immutable] val prefix5: Arr5, private[immutable] val len12345: Int, + private[immutable] val data6: Arr6, + private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1, + _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) { + + @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1, + prefix2: Arr2 = prefix2, len12: Int = len12, + prefix3: Arr3 = prefix3, len123: Int = len123, + prefix4: Arr4 = prefix4, len1234: Int = len1234, + prefix5: Arr5 = prefix5, len12345: Int = len12345, + data6: Arr6 = data6, + suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1, + length0: Int = length0) = + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0) + + @inline def apply(index: Int): A = { + if(index >= 0 && index < length0) { + val io = index - len12345 + if(io >= 0) { + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1) + else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1) + else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1) + else if(i3 < suffix3.length) suffix3(i3)(i2)(i1) + else if(i2 < suffix2.length) suffix2(i2)(i1) + else suffix1(i1) + } else if(index >= len1234) { + val io = index - len1234 + prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len123) { + val io = index - len123 + prefix4(io >>> BITS3)((io >>> 
BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len12) { + val io = index - len12 + prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK) + } else if(index >= len1) { + val io = index - len1 + prefix2(io >>> BITS)(io & MASK) + } else prefix1(index) + }.asInstanceOf[A] else throw ioob(index) + } + + override def updated[B >: A](index: Int, elem: B): Vector[B] = { + if(index >= 0 && index < length0) { + if(index >= len12345) { + val io = index - len12345 + val i6 = io >>> BITS5 + val i5 = (io >>> BITS4) & MASK + val i4 = (io >>> BITS3) & MASK + val i3 = (io >>> BITS2) & MASK + val i2 = (io >>> BITS) & MASK + val i1 = io & MASK + if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem)) + else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem)) + else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem)) + else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem)) + else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem)) + else copy(suffix1 = copyUpdate(suffix1, i1, elem)) + } else if(index >= len1234) { + val io = index - len1234 + copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len123) { + val io = index - len123 + copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len12) { + val io = index - len12 + copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem)) + } else if(index >= len1) { + val io = index - len1 + copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem)) + } else { + copy(prefix1 = copyUpdate(prefix1, index, elem)) + } + } else throw ioob(index) + } + + override def appended[B >: A](elem: B): Vector[B] = { + if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, 
elem), length0 = length0+1) + else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def prepended[B >: A](elem: B): Vector[B] = { + if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1) + else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, 
copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1) + else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1) + else throw new IllegalArgumentException + } + + override def map[B](f: A => B): Vector[B] = + copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f), + data6 = mapElems(6, data6, f), + suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f)) + + protected[this] def slice0(lo: Int, hi: Int): Vector[A] = { + val b = new VectorSliceBuilder(lo, hi) + b.consider(1, prefix1) + b.consider(2, prefix2) + b.consider(3, prefix3) + b.consider(4, prefix4) + b.consider(5, prefix5) + b.consider(6, data6) + b.consider(5, suffix5) + b.consider(4, suffix4) + b.consider(3, suffix3) + b.consider(2, suffix2) + b.consider(1, suffix1) + b.result() + } + + override def tail: Vector[A] = + if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, len12345 = len12345-1, length0 = length0-1) + else slice0(1, length0) + + override def init: Vector[A] = + if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1) + else slice0(0, length0-1) + + protected[immutable] def vectorSliceCount: Int = 11 + protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match { + case 0 => prefix1 + case 1 => prefix2 + case 2 => prefix3 + case 3 => prefix4 + case 4 => prefix5 + case 5 => data6 + case 6 => suffix5 + case 7 => suffix4 + case 8 => suffix3 + case 9 => suffix2 + case 10 => suffix1 + } + protected[immutable] def 
vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match { + case 0 => len1 + case 1 => len12 + case 2 => len123 + case 3 => len1234 + case 4 => len12345 + case 5 => len12345 + data6.length*WIDTH5 + case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2 + case 9 => length0 - suffix1.length + case 10 => length0 + } + + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + prepend1IfSpace(prefix1, prefix) match { + case null => super.prependedAll0(prefix, k) + case prefix1b => + val diff = prefix1b.length - prefix1.length + copy(prefix1 = prefix1b, + len1 = len1 + diff, + len12 = len12 + diff, + len123 = len123 + diff, + len1234 = len1234 + diff, + len12345 = len12345 + diff, + length0 = length0 + diff, + ) + } + + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + val suffix1b = append1IfSpace(suffix1, suffix) + if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) + else super.appendedAll0(suffix, k) + } +} + + +/** Helper class for vector slicing. It is initialized with the validated start and end index, + * then the vector slices are added in succession with `consider`. No matter what the dimension + * of the originating vector is or where the cut is performed, this always results in a + * structure with the highest-dimensional data in the middle and fingers of decreasing dimension + * at both ends, which can be turned into a new vector with very little rebalancing. 
+ */ +private final class VectorSliceBuilder(lo: Int, hi: Int) { + //println(s"***** VectorSliceBuilder($lo, $hi)") + + private[this] val slices = new Array[Array[AnyRef]](11) + private[this] var len, pos, maxDim = 0 + + @inline private[this] def prefixIdx(n: Int) = n-1 + @inline private[this] def suffixIdx(n: Int) = 11-n + + def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** consider($n, /${a.length})") + val count = a.length * (1 << (BITS*(n-1))) + val lo0 = mmax(lo-pos, 0) + val hi0 = mmin(hi-pos, count) + if(hi0 > lo0) { + addSlice(n, a, lo0, hi0) + len += (hi0 - lo0) + } + pos += count + } + + private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = { + //println(s"***** addSlice($n, /${a.length}, $lo, $hi)") + if(n == 1) { + add(1, copyOrUse(a, lo, hi)) + } else { + val bitsN = BITS * (n-1) + val widthN = 1 << bitsN + val loN = lo >>> bitsN + val hiN = hi >>> bitsN + val loRest = lo & (widthN - 1) + val hiRest = hi & (widthN - 1) + //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest") + if(loRest == 0) { + if(hiRest == 0) { + add(n, copyOrUse(a, loN, hiN)) + } else { + if(hiN > loN) add(n, copyOrUse(a, loN, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } else { + if(hiN == loN) { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest) + } else { + addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN) + if(hiRest == 0) { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + } else { + if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN)) + addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest) + } + } + } + } + } + + private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = { + //println(s"***** add($n, /${a.length})") + val idx = + if(n <= maxDim) suffixIdx(n) + else { maxDim = n; prefixIdx(n) } + slices(idx) = a.asInstanceOf[Array[AnyRef]] + } + + def result[A](): Vector[A] = { + //println(s"***** result: $len, 
$maxDim") + if(len <= 32) { + if(len == 0) Vector0 + else { + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}") + val a: Arr1 = + if(prefix1 ne null) { + if(suffix1 ne null) concatArrays(prefix1, suffix1) + else prefix1 + } else if(suffix1 ne null) suffix1 + else { + val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2] + if(prefix2 ne null) prefix2(0) + else { + val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2] + suffix2(0) + } + } + new Vector1(a) + } + } else { + balancePrefix(1) + balanceSuffix(1) + var resultDim = maxDim + if(resultDim < 6) { + val pre = slices(prefixIdx(maxDim)) + val suf = slices(suffixIdx(maxDim)) + if((pre ne null) && (suf ne null)) { + // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array, + // otherwise increase the dimension + if(pre.length + suf.length <= WIDTH-2) { + slices(prefixIdx(maxDim)) = concatArrays(pre, suf) + slices(suffixIdx(maxDim)) = null + } else resultDim += 1 + } else { + // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we + // only allow WIDTH-2 for the main data, so increase the dimension in this case + val one = if(pre ne null) pre else suf + if(one.length > WIDTH-2) resultDim += 1 + } + } + val prefix1 = slices(prefixIdx(1)) + val suffix1 = slices(suffixIdx(1)) + val len1 = prefix1.length + val res = (resultDim: @switch) match { + case 2 => + val data2 = dataOr(2, empty2) + new Vector2[A](prefix1, len1, data2, suffix1, len) + case 3 => + val prefix2 = prefixOr(2, empty2) + val data3 = dataOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len) + case 4 => + val prefix2 = prefixOr(2, empty2) + val 
prefix3 = prefixOr(3, empty3) + val data4 = dataOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len) + case 5 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val data5 = dataOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len) + case 6 => + val prefix2 = prefixOr(2, empty2) + val prefix3 = prefixOr(3, empty3) + val prefix4 = prefixOr(4, empty4) + val prefix5 = prefixOr(5, empty5) + val data6 = dataOr(6, empty6) + val suffix5 = suffixOr(5, empty5) + val suffix4 = suffixOr(4, empty4) + val suffix3 = suffixOr(3, empty3) + val suffix2 = suffixOr(2, empty2) + val len12 = len1 + (prefix2.length * WIDTH) + val len123 = len12 + (prefix3.length * WIDTH2) + val len1234 = len123 + (prefix4.length * WIDTH3) + val len12345 = len1234 + (prefix5.length * WIDTH4) + new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len) + } + res + } + } + + @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] else a + } + + @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + + @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] 
= { + val p = slices(prefixIdx(n)) + if(p ne null) p.asInstanceOf[Array[T]] + else { + val s = slices(suffixIdx(n)) + if(s ne null) s.asInstanceOf[Array[T]] else a + } + } + + /** Ensure prefix is not empty */ + private[this] def balancePrefix(n: Int): Unit = { + if(slices(prefixIdx(n)) eq null) { + if(n == maxDim) { + slices(prefixIdx(n)) = slices(suffixIdx(n)) + slices(suffixIdx(n)) = null + } else { + balancePrefix(n+1) + val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(preN1 ne null) + slices(prefixIdx(n)) = preN1(0) + if(preN1.length == 1) { + slices(prefixIdx(n+1)) = null + if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n + } else { + slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]] + } + } + } + } + + /** Ensure suffix is not empty */ + private[this] def balanceSuffix(n: Int): Unit = { + if(slices(suffixIdx(n)) eq null) { + if(n == maxDim) { + slices(suffixIdx(n)) = slices(prefixIdx(n)) + slices(prefixIdx(n)) = null + } else { + balanceSuffix(n+1) + val sufN1 = slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]] + //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}") + slices(suffixIdx(n)) = sufN1(sufN1.length-1) + if(sufN1.length == 1) { + slices(suffixIdx(n+1)) = null + if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n + } else { + slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]] + } + } + } + } + + override def toString: String = + s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)" + + private[immutable] def getSlices: Array[Array[AnyRef]] = slices +} + + +final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { + + private[this] var a6: Arr6 = _ + private[this] var a5: Arr5 = _ + private[this] var a4: Arr4 = _ + private[this] var a3: Arr3 = _ + private[this] var a2: Arr2 = _ + private[this] var a1: Arr1 = new Arr1(WIDTH) + private[this] var 
len1, lenRest, offset = 0 + private[this] var prefixIsRightAligned = false + private[this] var depth = 1 + + @inline private[this] final def setLen(i: Int): Unit = { + len1 = i & MASK + lenRest = i - len1 + } + + override def knownSize: Int = len1 + lenRest - offset + + @inline def size: Int = knownSize + @inline def isEmpty: Boolean = knownSize == 0 + @inline def nonEmpty: Boolean = knownSize != 0 + + def clear(): Unit = { + a6 = null + a5 = null + a4 = null + a3 = null + a2 = null + a1 = new Arr1(WIDTH) + len1 = 0 + lenRest = 0 + offset = 0 + prefixIsRightAligned = false + depth = 1 + } + + private[immutable] def initSparse(size: Int, elem: A): Unit = { + setLen(size) + Arrays.fill(a1, elem) + if(size > WIDTH) { + a2 = new Array(WIDTH) + Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1) + if(size > WIDTH2) { + a3 = new Array(WIDTH) + Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2) + if(size > WIDTH3) { + a4 = new Array(WIDTH) + Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3) + if(size > WIDTH4) { + a5 = new Array(WIDTH) + Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4) + if(size > WIDTH5) { + a6 = new Array(LASTWIDTH) + Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5) + depth = 6 + } else depth = 5 + } else depth = 4 + } else depth = 3 + } else depth = 2 + } else depth = 1 + } + + private[immutable] def initFrom(prefix1: Arr1): Unit = { + depth = 1 + setLen(prefix1.length) + a1 = copyOrUse(prefix1, 0, WIDTH) + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + } + + private[immutable] def initFrom(v: Vector[_]): this.type = { + (v.vectorSliceCount: @switch) match { + case 0 => + case 1 => + val v1 = v.asInstanceOf[Vector1[_]] + depth = 1 + setLen(v1.prefix1.length) + a1 = copyOrUse(v1.prefix1, 0, WIDTH) + case 3 => + val v2 = v.asInstanceOf[Vector2[_]] + val d2 = v2.data2 + a1 = copyOrUse(v2.suffix1, 0, WIDTH) + depth = 2 + offset = WIDTH - v2.len1 + setLen(v2.length0 + offset) + a2 = new Arr2(WIDTH) + a2(0) 
= v2.prefix1 + System.arraycopy(d2, 0, a2, 1, d2.length) + a2(d2.length+1) = a1 + case 5 => + val v3 = v.asInstanceOf[Vector3[_]] + val d3 = v3.data3 + val s2 = v3.suffix2 + a1 = copyOrUse(v3.suffix1, 0, WIDTH) + depth = 3 + offset = WIDTH2 - v3.len12 + setLen(v3.length0 + offset) + a3 = new Arr3(WIDTH) + a3(0) = copyPrepend(v3.prefix1, v3.prefix2) + System.arraycopy(d3, 0, a3, 1, d3.length) + a2 = copyOf(s2, WIDTH) + a3(d3.length+1) = a2 + a2(s2.length) = a1 + case 7 => + val v4 = v.asInstanceOf[Vector4[_]] + val d4 = v4.data4 + val s3 = v4.suffix3 + val s2 = v4.suffix2 + a1 = copyOrUse(v4.suffix1, 0, WIDTH) + depth = 4 + offset = WIDTH3 - v4.len123 + setLen(v4.length0 + offset) + a4 = new Arr4(WIDTH) + a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3) + System.arraycopy(d4, 0, a4, 1, d4.length) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a4(d4.length+1) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 9 => + val v5 = v.asInstanceOf[Vector5[_]] + val d5 = v5.data5 + val s4 = v5.suffix4 + val s3 = v5.suffix3 + val s2 = v5.suffix2 + a1 = copyOrUse(v5.suffix1, 0, WIDTH) + depth = 5 + offset = WIDTH4 - v5.len1234 + setLen(v5.length0 + offset) + a5 = new Arr5(WIDTH) + a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4) + System.arraycopy(d5, 0, a5, 1, d5.length) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a5(d5.length+1) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + case 11 => + val v6 = v.asInstanceOf[Vector6[_]] + val d6 = v6.data6 + val s5 = v6.suffix5 + val s4 = v6.suffix4 + val s3 = v6.suffix3 + val s2 = v6.suffix2 + a1 = copyOrUse(v6.suffix1, 0, WIDTH) + depth = 6 + offset = WIDTH5 - v6.len12345 + setLen(v6.length0 + offset) + a6 = new Arr6(LASTWIDTH) + a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5) + System.arraycopy(d6, 0, a6, 1, d6.length) + a5 = copyOf(s5, 
WIDTH) + a4 = copyOf(s4, WIDTH) + a3 = copyOf(s3, WIDTH) + a2 = copyOf(s2, WIDTH) + a6(d6.length+1) = a5 + a5(s5.length) = a4 + a4(s4.length) = a3 + a3(s3.length) = a2 + a2(s2.length) = a1 + } + if(len1 == 0 && lenRest > 0) { + // force advance() on next addition: + len1 = WIDTH + lenRest -= WIDTH + } + this + } + + //TODO Make public; this method is only private for binary compatibility + private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = { + if (len1 != 0 || lenRest != 0) + throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .reset() or use a new VectorBuilder.") + val (prefixLength, maxPrefixLength) = bigVector match { + case Vector0 => (0, 1) + case v1: Vector1[_] => (0, 1) + case v2: Vector2[_] => (v2.len1, WIDTH) + case v3: Vector3[_] => (v3.len12, WIDTH2) + case v4: Vector4[_] => (v4.len123, WIDTH3) + case v5: Vector5[_] => (v5.len1234, WIDTH4) + case v6: Vector6[_] => (v6.len12345, WIDTH5) + } + if (maxPrefixLength == 1) return this // does not really make sense to align for <= 32 element-vector + val overallPrefixLength = (before + prefixLength) % maxPrefixLength + offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength + // pretend there are already `offset` elements added + advanceN(offset & ~MASK) + len1 = offset & MASK + prefixIsRightAligned = true + this + } + + /** + * Removes `offset` leading `null`s in the prefix. + * This is needed after calling `alignTo` and subsequent additions, + * directly before the result is used for creating a new Vector. + * Note that the outermost array keeps its length to keep the + * Builder re-usable. + * + * example: + * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...) + * becomes + * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?) 
+ */ + private[this] def leftAlignPrefix(): Unit = { + @inline def shrinkOffsetIfToLarge(width: Int): Unit = { + val newOffset = offset % width + lenRest -= offset - newOffset + offset = newOffset + } + var a: Array[AnyRef] = null // the array we modify + var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a + if (depth >= 6) { + a = a6.asInstanceOf[Array[AnyRef]] + val i = offset >>> BITS5 + if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i) + shrinkOffsetIfToLarge(WIDTH5) + if ((lenRest >>> BITS5) == 0) depth = 5 + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 5) { + if (a == null) a = a5.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS4) & MASK + if (depth == 5) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a5 = a.asInstanceOf[Arr5] + shrinkOffsetIfToLarge(WIDTH4) + if ((lenRest >>> BITS4) == 0) depth = 4 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 4) { + if (a == null) a = a4.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS3) & MASK + if (depth == 4) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a4 = a.asInstanceOf[Arr4] + shrinkOffsetIfToLarge(WIDTH3) + if ((lenRest >>> BITS3) == 0) depth = 3 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 3) { + if (a == null) a = a3.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS2) & MASK + if (depth == 3) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a3 = a.asInstanceOf[Arr3] + shrinkOffsetIfToLarge(WIDTH2) + if ((lenRest >>> BITS2) == 0) depth = 2 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 2) { + if (a == null) a = a2.asInstanceOf[Array[AnyRef]] + val i = (offset >>> BITS) & MASK + if (depth == 2) { + if (i > 0) System.arraycopy(a, 
i, a, 0, WIDTH - i) + a2 = a.asInstanceOf[Arr2] + shrinkOffsetIfToLarge(WIDTH) + if ((lenRest >>> BITS) == 0) depth = 1 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + aParent = a + a = a(0).asInstanceOf[Array[AnyRef]] + } + if (depth >= 1) { + if (a == null) a = a1.asInstanceOf[Array[AnyRef]] + val i = offset & MASK + if (depth == 1) { + if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i) + a1 = a.asInstanceOf[Arr1] + len1 -= offset + offset = 0 + } else { + if (i > 0) a = copyOfRange(a, i, WIDTH) + aParent(0) = a + } + } + prefixIsRightAligned = false + } + + def addOne(elem: A): this.type = { + if(len1 == WIDTH) advance() + a1(len1) = elem.asInstanceOf[AnyRef] + len1 += 1 + this + } + + private[this] def addArr1(data: Arr1): Unit = { + val dl = data.length + if(dl > 0) { + if(len1 == WIDTH) advance() + val copy1 = mmin(WIDTH-len1, dl) + val copy2 = dl - copy1 + System.arraycopy(data, 0, a1, len1, copy1) + len1 += copy1 + if(copy2 > 0) { + advance() + System.arraycopy(data, copy1, a1, 0, copy2) + len1 += copy2 + } + } + } + + private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = { +// assert(dim >= 2) +// assert(lenRest % WIDTH == 0) +// assert(len1 == 0 || len1 == WIDTH) + if (slice.isEmpty) return + if (len1 == WIDTH) advance() + val sl = slice.length + (dim: @switch) match { + case 2 => + // lenRest is always a multiple of WIDTH + val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS) & MASK + System.arraycopy(slice, 0, a2, destPos, copy1) + advanceN(WIDTH * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a2, 0, copy2) + advanceN(WIDTH * copy2) + } + case 3 => + if (lenRest % WIDTH2 != 0) { + // lenRest is not multiple of WIDTH2, so this slice does not align, need to try lower dimension + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2)) + return + } + val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl) + val copy2 = sl - copy1 
+ val destPos = (lenRest >>> BITS2) & MASK + System.arraycopy(slice, 0, a3, destPos, copy1) + advanceN(WIDTH2 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a3, 0, copy2) + advanceN(WIDTH2 * copy2) + } + case 4 => + if (lenRest % WIDTH3 != 0) { + // lenRest is not multiple of WIDTH3, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3)) + return + } + val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS3) & MASK + System.arraycopy(slice, 0, a4, destPos, copy1) + advanceN(WIDTH3 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a4, 0, copy2) + advanceN(WIDTH3 * copy2) + } + case 5 => + if (lenRest % WIDTH4 != 0) { + // lenRest is not multiple of WIDTH4, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4)) + return + } + val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl) + val copy2 = sl - copy1 + val destPos = (lenRest >>> BITS4) & MASK + System.arraycopy(slice, 0, a5, destPos, copy1) + advanceN(WIDTH4 * copy1) + if (copy2 > 0) { + System.arraycopy(slice, copy1, a5, 0, copy2) + advanceN(WIDTH4 * copy2) + } + case 6 => // note width is now LASTWIDTH + if (lenRest % WIDTH5 != 0) { + // lenRest is not multiple of WIDTH5, so this slice does not align, need to try lower dimensions + slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5)) + return + } + val copy1 = sl + // there is no copy2 because there can't be another a6 to copy to + val destPos = lenRest >>> BITS5 + if (destPos + copy1 > LASTWIDTH) + throw new IllegalArgumentException("exceeding 2^31 elements") + System.arraycopy(slice, 0, a6, destPos, copy1) + advanceN(WIDTH5 * copy1) + } + } + + private[this] def addVector(xs: Vector[A]): this.type = { + val sliceCount = xs.vectorSliceCount + var sliceIdx = 0 + while(sliceIdx < sliceCount) { + val slice = 
xs.vectorSlice(sliceIdx) + vectorSliceDim(sliceCount, sliceIdx) match { + case 1 => addArr1(slice.asInstanceOf[Arr1]) + case n if len1 == WIDTH || len1 == 0 => + addArrN(slice.asInstanceOf[Array[AnyRef]], n) + case n => foreachRec(n-2, slice, addArr1) + } + sliceIdx += 1 + } + this + } + + override def addAll(xs: IterableOnce[A]): this.type = xs match { + case v: Vector[_] => + if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) + else addVector(v.asInstanceOf[Vector[A]]) + case _ => + super.addAll(xs) + } + + private[this] def advance(): Unit = { + val idx = lenRest + WIDTH + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advanceN(n: Int): Unit = if (n > 0) { + // assert(n % 32 == 0) + val idx = lenRest + n + val xor = idx ^ lenRest + lenRest = idx + len1 = 0 + advance1(idx, xor) + } + + private[this] def advance1(idx: Int, xor: Int): Unit = { + if (xor <= 0) { // level = 6 or something very unexpected happened + throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth") + } else if (xor < WIDTH2) { // level = 1 + if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 } + a1 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + } else if (xor < WIDTH3) { // level = 2 + if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + } else if (xor < WIDTH4) { // level = 3 + if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + } else if (xor < WIDTH5) { // level = 4 + if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + 
a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + } else { // level = 5 + if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 } + a1 = new Array(WIDTH) + a2 = new Array(WIDTH) + a3 = new Array(WIDTH) + a4 = new Array(WIDTH) + a5 = new Array(WIDTH) + a2((idx >>> BITS) & MASK) = a1 + a3((idx >>> BITS2) & MASK) = a2 + a4((idx >>> BITS3) & MASK) = a3 + a5((idx >>> BITS4) & MASK) = a4 + a6(idx >>> BITS5) = a5 + } + } + + def result(): Vector[A] = { + if (prefixIsRightAligned) leftAlignPrefix() + val len = len1 + lenRest + val realLen = len - offset + if(realLen == 0) Vector.empty + else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len") + else if(len <= WIDTH) { + new Vector1(copyIfDifferentSize(a1, realLen)) + } else if(len <= WIDTH2) { + val i1 = (len-1) & MASK + val i2 = (len-1) >>> BITS + val data = copyOfRange(a2, 1, i2) + val prefix1 = a2(0) + val suffix1 = copyIfDifferentSize(a2(i2), i1+1) + new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen) + } else if(len <= WIDTH3) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) + val data = copyOfRange(a3, 1, i3) + val prefix2 = copyTail(a3(0)) + val prefix1 = a3(0)(0) + val suffix2 = copyOf(a3(i3), i2) + val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen) + } else if(len <= WIDTH4) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) + val data = copyOfRange(a4, 1, i4) + val prefix3 = copyTail(a4(0)) + val prefix2 = copyTail(a4(0)(0)) + val prefix1 = a4(0)(0)(0) + val suffix3 = copyOf(a4(i4), i3) + val suffix2 = copyOf(a4(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1) + val len1 = 
prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen) + } else if(len <= WIDTH5) { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) + val data = copyOfRange(a5, 1, i5) + val prefix4 = copyTail(a5(0)) + val prefix3 = copyTail(a5(0)(0)) + val prefix2 = copyTail(a5(0)(0)(0)) + val prefix1 = a5(0)(0)(0)(0) + val suffix4 = copyOf(a5(i5), i4) + val suffix3 = copyOf(a5(i5)(i4), i3) + val suffix2 = copyOf(a5(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen) + } else { + val i1 = (len-1) & MASK + val i2 = ((len-1) >>> BITS) & MASK + val i3 = ((len-1) >>> BITS2) & MASK + val i4 = ((len-1) >>> BITS3) & MASK + val i5 = ((len-1) >>> BITS4) & MASK + val i6 = ((len-1) >>> BITS5) + val data = copyOfRange(a6, 1, i6) + val prefix5 = copyTail(a6(0)) + val prefix4 = copyTail(a6(0)(0)) + val prefix3 = copyTail(a6(0)(0)(0)) + val prefix2 = copyTail(a6(0)(0)(0)(0)) + val prefix1 = a6(0)(0)(0)(0)(0) + val suffix5 = copyOf(a6(i6), i5) + val suffix4 = copyOf(a6(i6)(i5), i4) + val suffix3 = copyOf(a6(i6)(i5)(i4), i3) + val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2) + val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1) + val len1 = prefix1.length + val len12 = len1 + prefix2.length*WIDTH + val len123 = len12 + prefix3.length*WIDTH2 + val len1234 = len123 + prefix4.length*WIDTH3 + val len12345 = len1234 + prefix5.length*WIDTH4 + new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, 
len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen) + } + } + + override def toString: String = + s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)" + + private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]]( + a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]], + a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]] + ).asInstanceOf[Array[Array[_]]] +} + + +/** Compile-time definitions for Vector. No references to this object should appear in bytecode. */ +private[immutable] object VectorInline { + // compile-time numeric constants + final val BITS = 5 + final val WIDTH = 1 << BITS + final val MASK = WIDTH - 1 + final val BITS2 = BITS * 2 + final val WIDTH2 = 1 << BITS2 + final val BITS3 = BITS * 3 + final val WIDTH3 = 1 << BITS3 + final val BITS4 = BITS * 4 + final val WIDTH4 = 1 << BITS4 + final val BITS5 = BITS * 5 + final val WIDTH5 = 1 << BITS5 + final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30: + final val Log2ConcatFaster = 5 + final val AlignToFaster = 64 + + type Arr1 = Array[AnyRef] + type Arr2 = Array[Array[AnyRef]] + type Arr3 = Array[Array[Array[AnyRef]]] + type Arr4 = Array[Array[Array[Array[AnyRef]]]] + type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]] + type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]] + + /** Dimension of the slice at index */ + @inline def vectorSliceDim(count: Int, idx: Int): Int = { + val c = count/2 + c+1-abs(idx-c) + } + + @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] = + if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end) + + @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length) + + @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1) + + @inline final def 
copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] = + if(a.length == len) a else copyOf[T](a, len) + + @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a } + @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a } + @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a } + @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a } + @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a } + + @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = { + val a1c = a1.clone() + a1c(idx1) = elem.asInstanceOf[AnyRef] + a1c + } + + @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = { + val a2c = a2.clone() + a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem) + a2c + } + + @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = { + val a3c = a3.clone() + a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem) + a3c + } + + @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = { + val a4c = a4.clone() + a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem) + a4c + } + + @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = { + val a5c = a5.clone() + a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem) + a5c + } + + @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = { + val a6c = a6.clone() + a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem) + a6c + } + + @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = { + val dest = copyOf[T](a, a.length+b.length) + System.arraycopy(b, 0, dest, a.length, b.length) + dest + } +} + + +/** Helper methods and constants for Vector. 
*/ +private object VectorStatics { + + final def copyAppend1(a: Arr1, elem: Any): Arr1 = { + val alen = a.length + val ac = new Arr1(alen+1) + System.arraycopy(a, 0, ac, 0, alen) + ac(alen) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = { + val ac = copyOf(a, a.length+1) + ac(ac.length-1) = elem + ac + } + + final def copyPrepend1(elem: Any, a: Arr1): Arr1 = { + val ac = new Arr1(a.length+1) + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem.asInstanceOf[AnyRef] + ac + } + + final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]] + System.arraycopy(a, 0, ac, 1, a.length) + ac(0) = elem + ac + } + + final val empty1: Arr1 = new Array(0) + final val empty2: Arr2 = new Array(0) + final val empty3: Arr3 = new Array(0) + final val empty4: Arr4 = new Array(0) + final val empty5: Arr5 = new Array(0) + final val empty6: Arr6 = new Array(0) + + final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = { + var i = 0 + val len = a.length + if(level == 0) { + while(i < len) { + f(a(i).asInstanceOf[A]) + i += 1 + } + } else { + val l = level-1 + while(i < len) { + foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + } + } + + final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = { + var i = 0 + while(i < a.length) { + val v1 = a(i).asInstanceOf[AnyRef] + val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef] + if(v1 ne v2) + return mapElems1Rest(a, f, i, v2) + i += 1 + } + a + } + + final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = { + val ac = new Arr1(a.length) + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef] + i += 1 + } + ac + } + + final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = { 
+ if(n == 1) + mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]] + else { + var i = 0 + while(i < a.length) { + val v1 = a(i) + val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f) + if(v1 ne v2) + return mapElemsRest(n, a, f, i, v2) + i += 1 + } + a + } + } + + final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = { + val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]] + if(at > 0) System.arraycopy(a, 0, ac, 0, at) + ac(at) = v2 + var i = at+1 + while(i < a.length) { + ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f) + i += 1 + } + ac.asInstanceOf[Array[T]] + } + + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1) + case s => + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-prefix1.length) { + val prefix1b = new Arr1(prefix1.length + s) + System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length) + it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0) + prefix1b + } else null + } + + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + case it: Iterable[_] => + if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { + it.size match { + case 0 => null + case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef]) + case s => + val suffix1b = copyOf(suffix1, suffix1.length + s) + it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } + } else null + case it => + val s = it.knownSize + if(s > 0 && s <= WIDTH-suffix1.length) { + val suffix1b = copyOf(suffix1, suffix1.length + 
s) + it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length) + suffix1b + } else null + } +} + + +private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable { + + private[this] var a1: Arr1 = v.prefix1 + private[this] var a2: Arr2 = _ + private[this] var a3: Arr3 = _ + private[this] var a4: Arr4 = _ + private[this] var a5: Arr5 = _ + private[this] var a6: Arr6 = _ + private[this] var a1len = a1.length + private[this] var i1 = 0 // current index in a1 + private[this] var oldPos = 0 + private[this] var len1 = totalLength // remaining length relative to a1 + + private[this] var sliceIdx = 0 + private[this] var sliceDim = 1 + private[this] var sliceStart = 0 // absolute position + private[this] var sliceEnd = a1len // absolute position + + //override def toString: String = + // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd" + + @inline override def knownSize = len1 - i1 + + @inline def hasNext: Boolean = len1 > i1 + + def next(): A = { + if(i1 == a1len) advance() + val r = a1(i1) + i1 += 1 + r.asInstanceOf[A] + } + + private[this] def advanceSlice(): Unit = { + if(!hasNext) Iterator.empty.next() + sliceIdx += 1 + var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx) + while(slice.length == 0) { + sliceIdx += 1 + slice = v.vectorSlice(sliceIdx) + } + sliceStart = sliceEnd + sliceDim = vectorSliceDim(sliceCount, sliceIdx) + (sliceDim: @switch) match { + case 1 => a1 = slice.asInstanceOf[Arr1] + case 2 => a2 = slice.asInstanceOf[Arr2] + case 3 => a3 = slice.asInstanceOf[Arr3] + case 4 => a4 = slice.asInstanceOf[Arr4] + case 5 => a5 = slice.asInstanceOf[Arr5] + case 6 => a6 = slice.asInstanceOf[Arr6] + } + sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1))) + if(sliceEnd > totalLength) sliceEnd = totalLength + if(sliceDim > 1) oldPos = (1 << 
(BITS*sliceDim))-1 + } + + private[this] def advance(): Unit = { + val pos = i1-len1+totalLength + if(pos == sliceEnd) advanceSlice() + if(sliceDim > 1) { + val io = pos - sliceStart + val xor = oldPos ^ io + advanceA(io, xor) + oldPos = io + } + len1 -= i1 + a1len = mmin(a1.length, len1) + i1 = 0 + } + + private[this] def advanceA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2(0) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3(0) + a1 = a2(0) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } else { + a5 = a6(io >>> BITS5) + a4 = a5(0) + a3 = a4(0) + a2 = a3(0) + a1 = a2(0) + } + } + + private[this] def setA(io: Int, xor: Int): Unit = { + if(xor < WIDTH2) { + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH3) { + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH4) { + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else if(xor < WIDTH5) { + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } else { + a5 = a6(io >>> BITS5) + a4 = a5((io >>> BITS4) & MASK) + a3 = a4((io >>> BITS3) & MASK) + a2 = a3((io >>> BITS2) & MASK) + a1 = a2((io >>> BITS) & MASK) + } + } + + override def drop(n: Int): Iterator[A] = { + if(n > 0) { + val oldpos = i1-len1+totalLength + val newpos = mmin(oldpos + n, totalLength) + if(newpos == totalLength) { + i1 = 0 + len1 = 0 + a1len = 0 + } else { + while(newpos >= sliceEnd) advanceSlice() + val io = newpos - sliceStart + if(sliceDim > 1) { + val xor = oldPos ^ io + setA(io, xor) + oldPos = io + } + a1len = a1.length + i1 = io & MASK + len1 = i1 + (totalLength-newpos) + if(a1len > len1) a1len = len1 + } + } + this + } + + override def take(n: Int): Iterator[A] = { + if(n < knownSize) { + val 
trunc = knownSize - mmax(0, n) + totalLength -= trunc + len1 -= trunc + if(len1 < a1len) a1len = len1 + if(totalLength < sliceEnd) sliceEnd = totalLength + } + this + } + + override def slice(from: Int, until: Int): Iterator[A] = { + val _until = + if(from > 0) { + drop(from) + until - from + } else until + take(_until) + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val xsLen = xs.length + val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) + var copied = 0 + val isBoxed = xs.isInstanceOf[Array[AnyRef]] + while(copied < total) { + if(i1 == a1len) advance() + val count = mmin(total-copied, a1.length-i1) + if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count) + else Array.copy(a1, i1, xs, start+copied, count) + i1 += count + copied += count + } + total + } + + override def toVector: Vector[A] = + v.slice(i1-len1+totalLength, totalLength) + + protected[immutable] def split(at: Int): NewVectorIterator[A] = { + val it2 = clone().asInstanceOf[NewVectorIterator[A]] + it2.take(at) + drop(at) + it2 + } +} + + +private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A]) + extends Stepper[A] with EfficientSplit { + + protected[this] def build(it: NewVectorIterator[A]): Semi + + final def hasStep: Boolean = it.hasNext + + final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED + + final def estimateSize: Long = it.knownSize + + def trySplit(): Sub = { + val len = it.knownSize + if(len > 1) build(it.split(len >>> 1)) + else null + } + + override final def iterator: Iterator[A] = it +} + +private class AnyVectorStepper[A](it: NewVectorIterator[A]) + extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] { + protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it) + def nextStep(): A = it.next() +} + +private class DoubleVectorStepper(it: NewVectorIterator[Double]) + 
extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper { + protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it) + def nextStep(): Double = it.next() +} + +private class IntVectorStepper(it: NewVectorIterator[Int]) + extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper { + protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it) + def nextStep(): Int = it.next() +} + +private class LongVectorStepper(it: NewVectorIterator[Long]) + extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper { + protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it) + def nextStep(): Long = it.next() +} + + +// The following definitions are needed for binary compatibility with ParVector +private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { + private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + def hasNext: Boolean = it.hasNext + def next(): A = it.next() + private[collection] def remainingElementCount: Int = it.size + private[collection] def remainingVector: Vector[A] = it.toVector +} diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala new file mode 100644 index 000000000000..cd8cf06c5c68 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -0,0 +1,275 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package immutable + +import scala.annotation.tailrec + +/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order. + * + * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense + * of using extra memory and generally lower performance for other operations + * + * @tparam K the type of the keys contained in this vector map. + * @tparam V the type of the values associated with the keys in this vector map. + * + * @define coll immutable vector map + * @define Coll `immutable.VectorMap` + */ +final class VectorMap[K, +V] private ( + private[immutable] val fields: Vector[Any], + private[immutable] val underlying: Map[K, (Int, V)], dropped: Int) + extends AbstractMap[K, V] + with SeqMap[K, V] + with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]] + with MapFactoryDefaults[K, V, VectorMap, Iterable] { + + import VectorMap._ + + override protected[this] def className: String = "VectorMap" + + private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = { + this(fields, underlying, 0) + } + + override val size = underlying.size + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = { + underlying.get(key) match { + case Some((slot, _)) => + new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped) + case None => + new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped) + } + } + + override def withDefault[V1 >: V](d: K => V1): Map[K, V1] = + new Map.WithDefault(this, d) + + override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = + new Map.WithDefault[K, V1](this, _ => d) + + def get(key: K): Option[V] = underlying.get(key) match { + case Some(v) => Some(v._2) + case None => None + } + + @tailrec + private def nextValidField(slot: Int): (Int, K) = { 
+ if (slot >= fields.size) (-1, null.asInstanceOf[K]) + else fields(slot) match { + case Tombstone(distance) => + nextValidField(slot + distance) + case k => + (slot, k.asInstanceOf[K]) + } + } + + def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] val fieldsLength = fields.length + private[this] var slot = -1 + private[this] var key: K = null.asInstanceOf[K] + + private[this] def advance(): Unit = { + val nextSlot = slot + 1 + if (nextSlot >= fieldsLength) { + slot = fieldsLength + key = null.asInstanceOf[K] + } else { + nextValidField(nextSlot) match { + case (-1, _) => + slot = fieldsLength + key = null.asInstanceOf[K] + case (s, k) => + slot = s + key = k + } + } + } + + advance() + + override def hasNext: Boolean = slot < fieldsLength + + override def next(): (K, V) = { + if (!hasNext) throw new NoSuchElementException("next called on depleted iterator") + val result = (key, underlying(key)._2) + advance() + result + } + } + + // No-Op overrides to allow for more efficient steppers in a minor release. + // Refining the return type to `S with EfficientSplit` is binary compatible. 
+ + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape) + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape) + + + def removed(key: K): VectorMap[K, V] = { + if (isEmpty) empty + else { + var fs = fields + val sz = fs.size + underlying.get(key) match { + case Some(_) if size == 1 => empty + case Some((slot, _)) => + val s = slot - dropped + + // Calculate next of kin + val next = + if (s < sz - 1) fs(s + 1) match { + case Tombstone(d) => s + d + 1 + case _ => s + 1 + } else s + 1 + + fs = fs.updated(s, Tombstone(next - s)) + + // Calculate first index of preceding tombstone sequence + val first = + if (s > 0) { + fs(s - 1) match { + case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0 + case Tombstone(d) if d == 1 => s - 1 + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case _ => s + } + }else s + fs = fs.updated(first, Tombstone(next - first)) + + // Calculate last index of succeeding tombstone sequence + val last = next - 1 + if (last != first) { + fs = fs.updated(last, Tombstone(first - 1 - last)) + } + new VectorMap(fs, underlying - key, dropped) + case _ => + this + } + } + } + + override def mapFactory: MapFactory[VectorMap] = VectorMap + + override def contains(key: K): Boolean = underlying.contains(key) + + override def head: (K, V) = iterator.next() + + override def last: (K, V) = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + val lastSlot = fields.length - 1 + val last = fields.last match { + case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K] + case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K] + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => k.asInstanceOf[K] 
+ } + (last, underlying(last)._2) + } + + override def lastOption: Option[(K, V)] = { + if (isEmpty) None + else Some(last) + } + + override def tail: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.tail") + val (slot, key) = nextValidField(0) + new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1) + } + + override def init: VectorMap[K, V] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + val lastSlot = fields.size - 1 + val (slot, key) = fields.last match { + case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K]) + case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K]) + case Tombstone(d) => throw new IllegalStateException("tombstone indicate wrong position: " + d) + case k => (lastSlot, k.asInstanceOf[K]) + } + new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped) + } + + override def keys: Vector[K] = keysIterator.toVector + + override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] { + override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2) + } +} + +object VectorMap extends MapFactory[VectorMap] { + //Class to mark deleted slots in 'fields'. + //When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone' + // represents the distance to the location of the next undeleted slot (or the last slot in 'fields' +1 if it does not exist). + //When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone' + // represents the distance to the location of the previous undeleted slot ( or -1 if it does not exist) multiplied by -1. + //For other deleted slots, it simply indicates that they have been deleted. 
+ private[VectorMap] final case class Tombstone(distance: Int) + + private[this] final val EmptyMap: VectorMap[Nothing, Nothing] = + new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)]) + + def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] + + def from[K, V](it: collection.IterableOnce[(K, V)]): VectorMap[K, V] = + it match { + case vm: VectorMap[K, V] => vm + case _ => (newBuilder[K, V] ++= it).result() + } + + def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V] +} + +private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { + private[this] val vectorBuilder = new VectorBuilder[K] + private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] + private[this] var aliased: VectorMap[K, V] = _ + + override def clear(): Unit = { + vectorBuilder.clear() + mapBuilder.clear() + aliased = null + } + + override def result(): VectorMap[K, V] = { + if (aliased eq null) { + aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result()) + } + aliased + } + def addOne(key: K, value: V): this.type = { + if (aliased ne null) { + aliased = aliased.updated(key, value) + } else { + mapBuilder.getOrElse(key, null) match { + case (slot, _) => + mapBuilder.addOne(key, (slot, value)) + case null => + val vectorSize = vectorBuilder.size + vectorBuilder.addOne(key) + mapBuilder.addOne(key, (vectorSize, value)) + } + } + this + } + + override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2) +} diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala new file mode 100644 index 000000000000..f2fdb8e3c32e --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -0,0 +1,140 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package immutable + +import scala.Predef.{wrapString => _, assert} +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl.CharStringStepper +import scala.collection.mutable.{Builder, StringBuilder} + +/** + * This class serves as a wrapper augmenting `String`s with all the operations + * found in indexed sequences. + * + * The difference between this class and `StringOps` is that calling transformer + * methods such as `filter` and `map` will yield an object of type `WrappedString` + * rather than a `String`. + * + * @param self a string contained within this wrapped string + * + * @define Coll `WrappedString` + * @define coll wrapped string + */ +@SerialVersionUID(3L) +final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char] + with IndexedSeqOps[Char, IndexedSeq, WrappedString] + with Serializable { + + def apply(i: Int): Char = self.charAt(i) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder + override def empty: WrappedString = WrappedString.empty + + override def slice(from: Int, until: Int): WrappedString = { + val start = if (from < 0) 0 else from + if (until <= start || start >= self.length) + return WrappedString.empty + + val end = if (until > length) length else until + new WrappedString(self.substring(start, end)) + } + override def length = self.length + override def toString = self + override def view: StringView = new StringView(self) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = { + val st = new 
CharStringStepper(self, 0, self.length) + val r = + if (shape.shape == StepperShape.CharShape) st + else { + assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape") + AnyStepper.ofParIntStepper(st) + } + r.asInstanceOf[S with EfficientSplit] + } + + override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean = + that match { + case s: WrappedString => self.startsWith(s.self, offset) + case _ => super.startsWith(that, offset) + } + + override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean = + that match { + case s: WrappedString => self.endsWith(s.self) + case _ => super.endsWith(that) + } + + override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match { + case c: Char => self.indexOf(c, from) + case _ => super.indexOf(elem, from) + } + + override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int = + elem match { + case c: Char => self.lastIndexOf(c, end) + case _ => super.lastIndexOf(elem, end) + } + + override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + (xs: Any) match { + case chs: Array[Char] => + val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) + self.getChars(0, copied, chs, start) + copied + case _ => super.copyToArray(xs, start, len) + } + + override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] = + suffix match { + case s: WrappedString => new WrappedString(self concat s.self) + case _ => super.appendedAll(suffix) + } + + override def sameElements[B >: Char](o: IterableOnce[B]) = o match { + case s: WrappedString => self == s.self + case _ => super.sameElements(o) + } + + override protected[this] def className = "WrappedString" + + override protected final def applyPreferredMaxLength: Int = Int.MaxValue + override def equals(other: Any): Boolean = other match { + case that: WrappedString => + this.self == that.self + case _ => + super.equals(other) + } +} + +/** A companion object for 
wrapped strings. + */ +@SerialVersionUID(3L) +object WrappedString extends SpecificIterableFactory[Char, WrappedString] { + def fromSpecific(it: IterableOnce[Char]): WrappedString = { + val b = newBuilder + val s = it.knownSize + if(s >= 0) b.sizeHint(s) + b ++= it + b.result() + } + val empty: WrappedString = new WrappedString("") + def newBuilder: Builder[Char, WrappedString] = + new StringBuilder().mapResult(x => new WrappedString(x)) + + implicit class UnwrapOp(private val value: WrappedString) extends AnyVal { + def unwrap: String = value.self + } +} diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala new file mode 100644 index 000000000000..8458429727e8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -0,0 +1,29 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + + +package object immutable { + type StringOps = scala.collection.StringOps + val StringOps = scala.collection.StringOps + type StringView = scala.collection.StringView + val StringView = scala.collection.StringView + + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + + @deprecated("Use Map instead of DefaultMap", "2.13.0") + type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V] +} diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala new file mode 100644 index 000000000000..c02a10770696 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -0,0 +1,601 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions + +/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically significantly faster with `AnyRefMap` than [[HashMap]]. + * Note that numbers and characters are not handled specially in AnyRefMap; + * only plain `equals` and `hashCode` are used in comparisons. + * + * Methods that traverse or regenerate the map, including `foreach` and `map`, + * are not in general faster than with `HashMap`. 
The methods `foreachKey`, + * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster + * than alternative ways to achieve the same functionality. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `AnyRefMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29^ entries (approximately + * 500 million). The maximum capacity is 2^30^, but performance will degrade + * rapidly as 2^30^ is approached. + * + */ +class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[K, V] + with MapOps[K, V, Map, AnyRefMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] + with Serializable { + + import AnyRefMap._ + def this() = this(AnyRefMap.exceptionDefault, 16, true) + + /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: K => V) = this(defaultEntry, 16, true) + + /** Creates a new `AnyRefMap` with an initial buffer of specified size. + * + * An `AnyRefMap` can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ + def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _hashes: Array[Int] = null + private[this] var _keys: Array[AnyRef] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int): Unit = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef] + ): Unit = { + mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz + } + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): AnyRefMap[K,V] = { + var sz = coll.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + coll.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder + + override def size: Int = _size + override def knownSize: Int = size + override def isEmpty: Boolean = _size == 0 + override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry) + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element + if (key eq null) 0x41081989 + else { + val h = key.hashCode + // Part of the MurmurHash3 32 bit finalizer + val i = (h ^ (h >>> 16)) * 0x85EBCA6B + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j + } + } + + private def seekEntry(h: Int, k: AnyRef): Int = { + var e = h & mask + 
var x = 0 + var g = 0 + val hashes = _hashes + val keys = _keys + while ({ g = hashes(e); g != 0}) { + if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + e | MissingBit + } + + @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = { + var e = h & mask + var x = 0 + var g = 0 + var o = -1 + while ({ g = _hashes(e); g != 0}) { + if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e + else if (o == -1 && g+g == 0) o = e + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + if (o >= 0) o | MissVacant else e | MissingBit + } + + override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0 + + override def get(key: K): Option[V] = { + val i = seekEntry(hashOf(key), key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val i = seekEntry(hashOf(key), key) + if (i < 0) default else _values(i).asInstanceOf[V] + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val h = hashOf(key) + var i = seekEntryOrOpen(h, key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or even contain what we want now + // (but if it does, we'll replace it) + val value = { + val oh = _hashes + val ans = defaultValue + if (oh ne _hashes) { + i = seekEntryOrOpen(h, key) + if (i >= 0) _size -= 1 + } + ans + } + _size += 1 + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key.asInstanceOf[AnyRef] + _values(j) = value.asInstanceOf[AnyRef] + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + value + } + else _values(i).asInstanceOf[V] + } + + /** Retrieves the value associated with a key, or the default for that type if none exists + * (null for AnyRef, 0 for floats and integers). 
+ * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ + def getOrNull(key: K): V = { + val i = seekEntry(hashOf(key), key) + (if (i < 0) null else _values(i)).asInstanceOf[V] + } + + /** Retrieves the value associated with a key. + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead; an exception will be thrown if no + * `defaultEntry` was supplied. + */ + override def apply(key: K): V = { + val i = seekEntry(hashOf(key), key) + if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + } + + /** Defers to defaultEntry to find a default value for the key. Throws an + * exception if no other default behavior was specified. + */ + override def default(key: K): V = defaultEntry(key) + + private def repack(newMask: Int): Unit = { + val oh = _hashes + val ok = _keys + val ov = _values + mask = newMask + _hashes = new Array[Int](mask+1) + _keys = new Array[AnyRef](mask+1) + _values = new Array[AnyRef](mask+1) + _vacant = 0 + var i = 0 + while (i < oh.length) { + val h = oh(i) + if (h+h != 0) { + var e = h & mask + var x = 0 + while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + _hashes(e) = h + _keys(e) = ok(i) + _values(e) = ov(i) + } + i += 1 + } + } + + /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup. + * + * For maps that undergo a complex creation process with both addition and + * removal of keys, and then are used heavily with no further removal of + * elements, calling `repack` after the end of the creation can result in + * improved performance. Repacking takes time proportional to the number + * of entries in the map. 
+ */ + def repack(): Unit = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && 8*_size < m) m = m >>> 1 + repack(m) + } + + override def put(key: K, value: V): Option[V] = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + None + } + else { + val ans = Some(_values(i).asInstanceOf[V]) + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + ans + } + } + + /** Updates the map to include a new key-value pair. + * + * This is the fastest way to add an entry to an `AnyRefMap`. + */ + override def update(key: K, value: V): Unit = { + val h = hashOf(key) + val i = seekEntryOrOpen(h, key) + if (i < 0) { + val j = i & IndexMask + _hashes(j) = h + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _hashes(i) = h + _values(i) = value.asInstanceOf[AnyRef] + } + } + + /** Adds a new key/value pair to this map and returns the map. */ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: K, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. 
+ */ + @inline final def addOne(key: K, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: K): this.type = { + val i = seekEntry(hashOf(key), key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _hashes(i) = Int.MinValue + _keys(i) = null + _values(i) = null + } + this + } + + def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] { + protected def nextResult(k: K, v: V) = (k, v) + } + override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] { + protected def nextResult(k: K, v: V) = k + } + override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] { + protected def nextResult(k: K, v: V) = v + } + + private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] { + private[this] val hz = _hashes + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var index = 0 + + def hasNext: Boolean = index < hz.length && { + var h = hz(index) + while (h+h == 0) { + index += 1 + if (index >= hz.length) return false + h = hz(index) + } + true + } + + def next(): A = { + if (hasNext) { + val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) + index += 1 + ans + } + else throw new NoSuchElementException("next") + } + + protected def nextResult(k: K, v: V): A + } + + + override def foreach[U](f: ((K,V)) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])) + i += 1 + e -= 1 + } + else return + } + } + + override def foreachEntry[U](f: (K,V) => U): Unit = { + var i = 0 + var e = _size + while (e > 0) { + while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1 + if (i < _hashes.length) { + f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]) + i += 1 + e -= 1 + } + else return + } + } + + override def clone(): AnyRefMap[K, V] = { + val hz = 
java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val arm = new AnyRefMap[K, V](defaultEntry, 1, false) + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = { + val arm = clone().asInstanceOf[AnyRefMap[K, V2]] + xs.iterator.foreach(kv => arm += kv) + arm + } + + override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) + + private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + f(elems(i).asInstanceOf[A]) + } + i += 1 + } + } + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f) + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f) + + /** Creates a new `AnyRefMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. 
+ */ + def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { + val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) + val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + arm.initializeTo(mask, _size, _vacant, hz, kz, vz) + arm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + var i,j = 0 + while (i < _hashes.length & j < _size) { + val h = _hashes(i) + if (h+h != 0) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) + def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.Map(this, f)) + def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + AnyRefMap.from(new View.FlatMap(this, f)) + def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) + + override def clear(): Unit = { + import java.util.Arrays.fill + fill(_keys, null) + fill(_values, null) + fill(_hashes, 0) + _size = 0 + _vacant = 
0 + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "AnyRefMap" +} + +object AnyRefMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private class ExceptionDefault extends (Any => Nothing) with Serializable { + def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) + } + private val exceptionDefault = new ExceptionDefault + + /** A builder for instances of `AnyRefMap`. + * + * This builder can be reused to create multiple instances. + */ + final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] + def addOne(entry: (K, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new AnyRefMap[K, V] + def result(): AnyRefMap[K, V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ + def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + + def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + + private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]): AnyRefMap[K, V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val arm = new AnyRefMap[K, V](sz * 2) + elems.iterator.foreach{ case (k,v) => arm(k) = v } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new empty `AnyRefMap`. 
*/ + def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + + /** Creates a new empty `AnyRefMap` with the supplied default */ + def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + + /** Creates a new `AnyRefMap` from an existing source collection. A source collection + * which is already an `AnyRefMap` gets cloned. + * + * @param source Source collection + * @tparam K the type of the keys + * @tparam V the type of the values + * @return a new `AnyRefMap` with the elements of `source` + */ + def from[K <: AnyRef, V](source: IterableOnce[(K, V)]): AnyRefMap[K, V] = source match { + case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] + case _ => buildFromIterableOnce(source) + } + + /** Creates a new `AnyRefMap` from arrays of keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. + */ + def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.length, values.length) + val arm = new AnyRefMap[K, V](sz * 2) + var i = 0 + while (i < sz) { arm(keys(i)) = values(i); i += 1 } + if (arm.size < (sz>>3)) arm.repack() + arm + } + + /** Creates a new `AnyRefMap` from keys and values. + * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
+ */ + def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { + val sz = math.min(keys.size, values.size) + val arm = new AnyRefMap[K, V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next() + if (arm.size < (sz >> 3)) arm.repack() + arm + } + + implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]) = AnyRefMap.from(it) + def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] + } + + implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala new file mode 100644 index 000000000000..e3ddeb71ef8e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -0,0 +1,403 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import java.util.Arrays + +import scala.annotation.nowarn +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable + +/** An implementation of the `Buffer` class using an array to + * represent the assembled sequence internally. Append, update and random + * access take constant time (amortized time). Prepends and removes are + * linear in the buffer size. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] + * section on `Array Buffers` for more information. + + * + * @tparam A the type of this arraybuffer's elements. + * + * @define Coll `mutable.ArrayBuffer` + * @define coll array buffer + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(-1582447879429021880L) +class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) + extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] + with IterableFactoryDefaults[A, ArrayBuffer] + with DefaultSerializable { + + def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0) + + def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + + @transient private[this] var mutationCount: Int = 0 + + // needs to be `private[collection]` or `protected[collection]` for parallel-collections + protected[collection] var array: Array[AnyRef] = initialElements + protected var size0 = initialSize + + override def stepper[S <: 
Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit]) + } + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + /** Ensure that the internal array has at least `n` cells. */ + protected def ensureSize(n: Int): Unit = { + array = ArrayBuffer.ensureSize(array, size0, n) + } + + // TODO 3.T: should be `protected`, perhaps `protected[this]` + /** Ensure that the internal array has at least `n` additional cells more than `size0`. */ + private[mutable] def ensureAdditionalSize(n: Int): Unit = { + // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow + array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n) + } + + def sizeHint(size: Int): Unit = + if(size > length && size >= 1) ensureSize(size) + + /** Reduce length to `n`, nulling out all dropped elements */ + private def reduceToSize(n: Int): Unit = { + mutationCount += 1 + Arrays.fill(array, n, size0, null) + size0 = n + } + + /** Trims the ArrayBuffer to an appropriate size for the current + * number of elements (rounding up to the next natural size), + * which may replace the array by a shorter one. + * This allows releasing some unused memory. + */ + def trimToSize(): Unit = { + resize(length) + } + + /** Trims the `array` buffer size down to either a power of 2 + * or Int.MaxValue while keeping first `requiredLength` elements. 
+ */ + private def resize(requiredLength: Int): Unit = + array = ArrayBuffer.downsize(array, requiredLength) + + @inline private def checkWithinBounds(lo: Int, hi: Int) = { + if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})") + if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})") + } + + def apply(n: Int): A = { + checkWithinBounds(n, n + 1) + array(n).asInstanceOf[A] + } + + def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { + checkWithinBounds(index, index + 1) + mutationCount += 1 + array(index) = elem.asInstanceOf[AnyRef] + } + + def length = size0 + + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) + + override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer + + /** Note: This does not actually resize the internal representation. + * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = reduceToSize(0) + + /** + * Clears this buffer and shrinks to @param size (rounding up to the next + * natural size) + * @param size + */ + def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = { + clear() + resize(size) + this + } + + def addOne(elem: A): this.type = { + mutationCount += 1 + ensureAdditionalSize(1) + val oldSize = size0 + size0 = oldSize + 1 + this(oldSize) = elem + this + } + + // Overridden to use array copying for efficiency where possible. 
+  override def addAll(elems: IterableOnce[A]): this.type = {
+    elems match {
+      case elems: ArrayBuffer[_] =>
+        val elemsLength = elems.size0
+        if (elemsLength > 0) {
+          mutationCount += 1
+          ensureAdditionalSize(elemsLength)
+          Array.copy(elems.array, 0, array, length, elemsLength)
+          size0 = length + elemsLength
+        }
+      case _ => super.addAll(elems)
+    }
+    this
+  }
+
+  def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = {
+    checkWithinBounds(index, index)
+    mutationCount += 1
+    ensureAdditionalSize(1)
+    Array.copy(array, index, array, index + 1, size0 - index)
+    size0 += 1
+    this(index) = elem
+  }
+
+  def prepend(elem: A): this.type = {
+    insert(0, elem)
+    this
+  }
+
+  def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = {
+    checkWithinBounds(index, index)
+    elems match {
+      case elems: collection.Iterable[A] =>
+        val elemsLength = elems.size
+        if (elemsLength > 0) {
+          mutationCount += 1
+          ensureAdditionalSize(elemsLength)
+          val len = size0
+          Array.copy(array, index, array, index + elemsLength, len - index)
+          // if `elems eq this`, this copy is safe because
+          // - `elems.array eq this.array`
+          // - we didn't overwrite the values being inserted after moving them in
+          //   the previous line
+          // - `copyElemsToArray` will call `System.arraycopy`
+          // - `System.arraycopy` will effectively "read" all the values before
+          //   overwriting any of them when two arrays are the same reference
+          val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength)
+          if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength")
+          size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy
+        }
+      case _ => insertAll(index, ArrayBuffer.from(elems))
+    }
+  }
+
+  /** Note: This does not actually resize the internal representation.
+ * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int): A = { + checkWithinBounds(index, index + 1) + val res = this(index) + Array.copy(array, index + 1, array, index, size0 - (index + 1)) + reduceToSize(size0 - 1) + res + } + + /** Note: This does not actually resize the internal representation. + * See trimToSize if you want to also resize internally + */ + def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit = + if (count > 0) { + checkWithinBounds(index, index + count) + Array.copy(array, index + count, array, index, size0 - (index + count)) + reduceToSize(size0 - count) + } else if (count < 0) { + throw new IllegalArgumentException("removing negative number of elements: " + count) + } + + @deprecated("Use 'this' instance instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def result(): this.type = this + + @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") + @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayBuffer" + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + /** Sorts this $coll in place according to an Ordering. + * + * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. 
+ */ + override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } + this + } + + @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B = + if (start == end) z + else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op) + + @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B = + if (start == end) z + else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op) + + override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op) + + override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op) + + override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op) + + override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op) +} + +/** + * Factory object for the `ArrayBuffer` class. 
+ * + * $factoryInfo + * + * @define coll array buffer + * @define Coll `mutable.ArrayBuffer` + */ +@SerialVersionUID(3L) +object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { + final val DefaultInitialSize = 16 + private[this] val emptyArray = new Array[AnyRef](0) + + def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + val k = coll.knownSize + if (k >= 0) { + // Avoid reallocation of buffer if length is known + val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + new ArrayBuffer[B](array, k) + } + else new ArrayBuffer[B] ++= coll + } + + def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + new GrowableBuilder[A, ArrayBuffer[A]](empty) { + override def sizeHint(size: Int): Unit = elems.ensureSize(size) + } + + def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + + /** + * @param arrayLen the length of the backing array + * @param targetLen the minimum length to resize up to + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeUp(arrayLen: Long, targetLen: Long): Int = { + if (targetLen <= arrayLen) -1 + else { + if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements") + IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetSize <= Int.MaxValue` + + val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize)) + math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt + } + } + // if necessary, copy (curSize elements of) the array to a new array of capacity n. + // Should use Array.copyOf(array, resizeEnsuring(array.length))? 
+ private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = { + val newLen = resizeUp(array.length, targetSize) + if (newLen < 0) array + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, curSize) + res + } + } + + /** + * @param arrayLen the length of the backing array + * @param targetLen the length to resize down to, if smaller than `arrayLen` + * @return -1 if no resizing is needed, or the size for the new array otherwise + */ + private def resizeDown(arrayLen: Int, targetLen: Int): Int = + if (targetLen >= arrayLen) -1 else math.max(targetLen, 0) + private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = { + val newLen = resizeDown(array.length, targetSize) + if (newLen < 0) array + else if (newLen == 0) emptyArray + else { + val res = new Array[AnyRef](newLen) + System.arraycopy(array, 0, res, 0, targetSize) + res + } + } +} + +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. 
+ this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) + } + + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length + override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: 
IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala new file mode 100644 index 000000000000..454527bcdebd --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -0,0 +1,522 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.reflect.ClassTag + +/** A builder class for arrays. + * + * @tparam T the type of the elements for the builder. 
+ */ +@SerialVersionUID(3L) +sealed abstract class ArrayBuilder[T] + extends ReusableBuilder[T, Array[T]] + with Serializable { + protected[this] var capacity: Int = 0 + protected[this] def elems: Array[T] + protected var size: Int = 0 + + def length: Int = size + + override def knownSize: Int = size + + protected[this] final def ensureSize(size: Int): Unit = { + if (capacity < size || capacity == 0) { + var newsize = if (capacity == 0) 16 else capacity * 2 + while (newsize < size) newsize *= 2 + resize(newsize) + } + } + + override final def sizeHint(size: Int): Unit = + if (capacity < size) resize(size) + + def clear(): Unit = size = 0 + + protected[this] def resize(size: Int): Unit + + /** Add all elements of an array */ + def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length) + + /** Add a slice of an array */ + def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = { + ensureSize(this.size + length) + Array.copy(xs, offset, elems, this.size, length) + size += length + this + } + + override def addAll(xs: IterableOnce[T]): this.type = { + val k = xs.knownSize + if (k > 0) { + ensureSize(this.size + k) + val actual = IterableOnce.copyElemsToArray(xs, elems, this.size) + if (actual != k) throw new IllegalStateException(s"Copied $actual of $k") + size += k + } else if (k < 0) super.addAll(xs) + this + } +} + +/** A companion object for array builders. + */ +object ArrayBuilder { + + /** Creates a new arraybuilder of type `T`. + * + * @tparam T type of the elements for the array builder, with a `ClassTag` context bound. + * @return a new empty array builder. 
+ */ + @inline def make[T: ClassTag]: ArrayBuilder[T] = { + val tag = implicitly[ClassTag[T]] + tag.runtimeClass match { + case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]] + case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]] + case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]] + case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]] + case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]] + case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]] + case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]] + case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]] + case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]] + case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] + } + } + + /** A class for array builders for arrays of reference types. + * + * This builder can be reused. + * + * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. 
+ */ + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] { + + protected var elems: Array[T] = _ + + private def mkArray(size: Int): Array[T] = { + if (capacity == size && capacity > 0) elems + else if (elems eq null) new Array[T](size) + else java.util.Arrays.copyOf[T](elems, size) + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: T): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[T] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def clear(): Unit = { + super.clear() + if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null) + } + + override def equals(other: Any): Boolean = other match { + case x: ofRef[_] => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofRef" + } + + /** A class for array builders for arrays of `byte`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofByte extends ArrayBuilder[Byte] { + + protected var elems: Array[Byte] = _ + + private def mkArray(size: Int): Array[Byte] = { + val newelems = new Array[Byte](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Byte): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Byte] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofByte => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofByte" + } + + /** A class for array builders for arrays of `short`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofShort extends ArrayBuilder[Short] { + + protected var elems: Array[Short] = _ + + private def mkArray(size: Int): Array[Short] = { + val newelems = new Array[Short](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Short): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Short] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofShort => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofShort" + } + + /** A class for array builders for arrays of `char`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofChar extends ArrayBuilder[Char] { + + protected var elems: Array[Char] = _ + + private def mkArray(size: Int): Array[Char] = { + val newelems = new Array[Char](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Char): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Char] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofChar => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofChar" + } + + /** A class for array builders for arrays of `int`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofInt extends ArrayBuilder[Int] { + + protected var elems: Array[Int] = _ + + private def mkArray(size: Int): Array[Int] = { + val newelems = new Array[Int](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Int): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Int] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofInt => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofInt" + } + + /** A class for array builders for arrays of `long`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofLong extends ArrayBuilder[Long] { + + protected var elems: Array[Long] = _ + + private def mkArray(size: Int): Array[Long] = { + val newelems = new Array[Long](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Long): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Long] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofLong => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofLong" + } + + /** A class for array builders for arrays of `float`s. It can be reused. */ + @SerialVersionUID(3L) + final class ofFloat extends ArrayBuilder[Float] { + + protected var elems: Array[Float] = _ + + private def mkArray(size: Int): Array[Float] = { + val newelems = new Array[Float](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Float): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Float] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofFloat => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofFloat" + } + + /** A class for array builders for arrays of `double`s. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofDouble extends ArrayBuilder[Double] { + + protected var elems: Array[Double] = _ + + private def mkArray(size: Int): Array[Double] = { + val newelems = new Array[Double](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Double): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Double] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofDouble => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofDouble" + } + + /** A class for array builders for arrays of `boolean`s. It can be reused. */ + @SerialVersionUID(3L) + class ofBoolean extends ArrayBuilder[Boolean] { + + protected var elems: Array[Boolean] = _ + + private def mkArray(size: Int): Array[Boolean] = { + val newelems = new Array[Boolean](size) + if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) + newelems + } + + protected[this] def resize(size: Int): Unit = { + elems = mkArray(size) + capacity = size + } + + def addOne(elem: Boolean): this.type = { + ensureSize(size + 1) + elems(size) = elem + size += 1 + this + } + + def result(): Array[Boolean] = { + if (capacity != 0 && capacity == size) { + capacity = 0 + val res = elems + elems = null + res + } + else mkArray(size) + } + + override def equals(other: Any): Boolean = other match { + case x: ofBoolean => (size == x.size) && (elems == x.elems) + case _ => false + } + + override def toString = "ArrayBuilder.ofBoolean" + } + + /** A class for array builders for arrays of `Unit` type. It can be reused. 
*/ + @SerialVersionUID(3L) + final class ofUnit extends ArrayBuilder[Unit] { + + protected def elems: Array[Unit] = throw new UnsupportedOperationException() + + def addOne(elem: Unit): this.type = { + size += 1 + this + } + + override def addAll(xs: IterableOnce[Unit]): this.type = { + size += xs.iterator.size + this + } + + override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = { + size += length + this + } + + def result() = { + val ans = new Array[Unit](size) + var i = 0 + while (i < size) { ans(i) = (); i += 1 } + ans + } + + override def equals(other: Any): Boolean = other match { + case x: ofUnit => (size == x.size) + case _ => false + } + + protected[this] def resize(size: Int): Unit = () + + override def toString = "ArrayBuilder.ofUnit" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala new file mode 100644 index 000000000000..205e1607f824 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -0,0 +1,645 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag + +/** An implementation of a double-ended queue that internally uses a resizable circular buffer. + * + * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement) + * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i)) + * and thus insertions and removals from end/beginning are fast. 
+ * + * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type. + * + * @tparam A the type of this ArrayDeque's elements. + * + * @define Coll `mutable.ArrayDeque` + * @define coll array deque + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class ArrayDeque[A] protected ( + protected var array: Array[AnyRef], + private[ArrayDeque] var start: Int, + private[ArrayDeque] var end: Int +) extends AbstractBuffer[A] + with IndexedBuffer[A] + with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]] + with IterableFactoryDefaults[A, ArrayDeque] + with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]] + with Cloneable[ArrayDeque[A]] + with DefaultSerializable { + + reset(array, start, end) + + private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = { + assert((array.length & (array.length - 1)) == 0, s"Array.length must be power of 2") + requireBounds(idx = start, until = array.length) + requireBounds(idx = end, until = array.length) + this.array = array + this.start = start + this.end = end + } + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def knownSize: Int = super[IndexedSeqOps].knownSize + + // No-Op override to allow for more efficient stepper in a minor release. 
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape) + + def apply(idx: Int): A = { + requireBounds(idx) + _get(idx) + } + + def update(idx: Int, elem: A): Unit = { + requireBounds(idx) + _set(idx, elem) + } + + def addOne(elem: A): this.type = { + ensureSize(length + 1) + appendAssumingCapacity(elem) + } + + def prepend(elem: A): this.type = { + ensureSize(length + 1) + prependAssumingCapacity(elem) + } + + @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = { + array(end) = elem.asInstanceOf[AnyRef] + end = end_+(1) + this + } + + @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = { + start = start_-(1) + array(start) = elem.asInstanceOf[AnyRef] + this + } + + override def prependAll(elems: IterableOnce[A]): this.type = { + val it = elems.iterator + if (it.nonEmpty) { + val n = length + // The following code resizes the current collection at most once and traverses elems at most twice + elems.knownSize match { + // Size is too expensive to compute AND we can traverse it only once - can't do much but retry with an IndexedSeq + case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */)) + + // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront + case srcLength if mustGrow(srcLength + n) => + val finalLength = srcLength + n + val array2 = ArrayDeque.alloc(finalLength) + it.copyToArray(array2.asInstanceOf[Array[A]]) + copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + + // Just fill up from (start - srcLength) to (start - 1) and move back start + case srcLength => + // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))` + var i = 0 + while(i < srcLength) { + _set(i - srcLength, it.next()) + i += 1 + } + start = 
start_-(srcLength) + } + } + this + } + + override def addAll(elems: IterableOnce[A]): this.type = { + elems.knownSize match { + case srcLength if srcLength > 0 => + ensureSize(srcLength + length) + elems.iterator.foreach(appendAssumingCapacity) + case _ => elems.iterator.foreach(+=) + } + this + } + + def insert(idx: Int, elem: A): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prepend(elem) + } else if (idx == n) { + addOne(elem) + } else { + val finalLength = n + 1 + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + array2(idx) = elem.asInstanceOf[AnyRef] + copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (n <= idx * 2) { + var i = n - 1 + while(i >= idx) { + _set(i + 1, _get(i)) + i -= 1 + } + end = end_+(1) + i += 1 + _set(i, elem) + } else { + var i = 0 + while(i < idx) { + _set(i - 1, _get(i)) + i += 1 + } + start = start_-(1) + _set(i, elem) + } + } + } + + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + requireBounds(idx, length+1) + val n = length + if (idx == 0) { + prependAll(elems) + } else if (idx == n) { + addAll(elems) + } else { + // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed) + val (it, srcLength) = { + val _srcLength = elems.knownSize + if (_srcLength >= 0) (elems.iterator, _srcLength) + else { + val indexed = IndexedSeq.from(elems) + (indexed.iterator, indexed.size) + } + } + if (it.nonEmpty) { + val finalLength = srcLength + n + // Either we resize right away or move prefix left or suffix right + if (mustGrow(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + it.copyToArray(array2.asInstanceOf[Array[A]], idx) + copySliceToArray(srcStart = idx, dest = 
array2, destStart = idx + srcLength, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx >= n) { // Cheaper to shift the suffix right + var i = n - 1 + while(i >= idx) { + _set(i + srcLength, _get(i)) + i -= 1 + } + end = end_+(srcLength) + while(it.hasNext) { + i += 1 + _set(i, it.next()) + } + } else { // Cheaper to shift prefix left + var i = 0 + while(i < idx) { + _set(i - srcLength, _get(i)) + i += 1 + } + start = start_-(srcLength) + while(it.hasNext) { + _set(i, it.next()) + i += 1 + } + } + } + } + } + + def remove(idx: Int, count: Int): Unit = { + if (count > 0) { + requireBounds(idx) + val n = length + val removals = Math.min(n - idx, count) + val finalLength = n - removals + val suffixStart = idx + removals + // If we know we can resize after removing, do it right away using arrayCopy + // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left + if (shouldShrink(finalLength)) { + val array2 = ArrayDeque.alloc(finalLength) + copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx) + copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n) + reset(array = array2, start = 0, end = finalLength) + } else if (2*idx <= finalLength) { // Cheaper to move the prefix right + var i = suffixStart - 1 + while(i >= removals) { + _set(i, _get(i - removals)) + i -= 1 + } + while(i >= 0) { + _set(i, null.asInstanceOf[A]) + i -= 1 + } + start = start_+(removals) + } else { // Cheaper to move the suffix left + var i = idx + while(i < finalLength) { + _set(i, _get(i + removals)) + i += 1 + } + while(i < n) { + _set(i, null.asInstanceOf[A]) + i += 1 + } + end = end_-(removals) + } + } else { + require(count == 0, s"removing negative number of elements: $count") + } + } + + def remove(idx: Int): A = { + val elem = this(idx) + remove(idx, 1) + elem + } + + override def subtractOne(elem: A): this.type = { + val idx = indexOf(elem) + if (idx 
>= 0) remove(idx, 1) //TODO: SeqOps should be fluent API + this + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the first element (throws exception when empty) + * See also removeHeadOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeHead(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr) + + @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + val elem = array(start) + array(start) = null + start = start_+(1) + if (resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @return + */ + def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] = + if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr)) + + /** + * Unsafely remove the last element (throws exception when empty) + * See also removeLastOption() + * + * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while + * @throws NoSuchElementException when empty + * @return + */ + def removeLast(resizeInternalRepr: Boolean = false): A = + if (isEmpty) throw new NoSuchElementException(s"empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr) + + @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = { + end = end_-(1) + val elem = array(end) + array(end) = null + if 
(resizeInternalRepr) resize(length) + elem.asInstanceOf[A] + } + + /** + * Remove all elements from this collection and return the elements while emptying this data structure + * @return + */ + def removeAll(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Remove all elements from this collection and return the elements in reverse while emptying this data structure + * @return + */ + def removeAllReverse(): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + elems.sizeHint(length) + while(nonEmpty) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the left of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(headOption.exists(f)) { + elems += removeHeadAssumingNonEmpty() + } + elems.result() + } + + /** + * Returns and removes all elements from the right of this queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return + */ + def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = { + val elems = scala.collection.immutable.Seq.newBuilder[A] + while(lastOption.exists(f)) { + elems += removeLastAssumingNonEmpty() + } + elems.result() + } + + /** Returns the first element which satisfies the given predicate after or at some start index + * and removes this element from the collections + * + * @param p the predicate used for choosing the first element + * @param from the start index + * @return the first element of the queue for which p yields true + */ + def removeFirst(p: A => Boolean, from: Int = 0): 
Option[A] = { + val i = indexWhere(p, from) + if (i < 0) None else Some(remove(i)) + } + + /** Returns all elements in this collection which satisfy the given predicate + * and removes those elements from this collections. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. + */ + def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = { + val res = scala.collection.immutable.Seq.newBuilder[A] + var i, j = 0 + while (i < size) { + if (p(this(i))) { + res += this(i) + } else { + if (i != j) { + this(j) = this(i) + } + j += 1 + } + i += 1 + } + if (i != j) takeInPlace(j) + res.result() + } + + @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint) + + def length = end_-(start) + + override def isEmpty = start == end + + override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end) + + override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque + + /** + * Note: This does not actually resize the internal representation. 
+ * See clearAndShrink if you want to also resize internally + */ + def clear(): Unit = { + while(nonEmpty) { + removeHeadAssumingNonEmpty() + } + } + + /** + * Clears this buffer and shrinks to @param size + * + * @param size + * @return + */ + def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = { + reset(array = ArrayDeque.alloc(size), start = 0, end = 0) + this + } + + protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = + new ArrayDeque[A](array, start = 0, end) + + override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) + if (copied > 0) { + copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) + } + copied + } + + override def toArray[B >: A: ClassTag]: Array[B] = + copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) + + /** + * Trims the capacity of this ArrayDeque's instance to be the current size + */ + def trimToSize(): Unit = resize(length) + + // Utils for common modular arithmetic: + @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1) + @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1) + @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1) + @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1) + + // Note: here be overflow dragons! This is used for int overflow + // assumptions in resize(). Use caution changing. + @inline private[this] def mustGrow(len: Int) = { + len >= array.length + } + + // Assumes that 0 <= len < array.length! + @inline private[this] def shouldShrink(len: Int) = { + // To avoid allocation churn, only shrink when array is large + // and less than 2/5 filled. + array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len + } + + // Assumes that 0 <= len < array.length! 
+ @inline private[this] def canShrink(len: Int) = { + array.length > ArrayDeque.DefaultInitialSize && array.length - len > len + } + + @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A] + + @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef] + + // Assumes that 0 <= len. + private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) { + val n = length + val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n) + reset(array = array2, start = 0, end = n) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "ArrayDeque" +} + +/** + * $factoryInfo + * @define coll array deque + * @define Coll `ArrayDeque` + */ +@SerialVersionUID(3L) +object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { + + def from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + val s = coll.knownSize + if (s >= 0) { + val array = alloc(s) + val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]]) + if (actual != s) throw new IllegalStateException(s"Copied $actual of $s") + new ArrayDeque[B](array, start = 0, end = s) + } else new ArrayDeque[B]() ++= coll + } + + def newBuilder[A]: Builder[A, ArrayDeque[A]] = + new GrowableBuilder[A, ArrayDeque[A]](empty) { + override def sizeHint(size: Int): Unit = { + elems.ensureSize(size) + } + } + + def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + + final val DefaultInitialSize = 16 + + /** + * We try to not repeatedly resize arrays smaller than this + */ + private[ArrayDeque] final val StableSize = 128 + + /** + * Allocates an array whose size is next power of 2 > `len` + * Largest possible len is 1<<30 - 1 + * + * @param len + * @return + */ + private[mutable] def alloc(len: Int) = { + require(len >= 0, s"Non-negative array size required") + val size = (1 << 31) >>> 
java.lang.Integer.numberOfLeadingZeros(len) << 1 + require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len") + new Array[AnyRef](Math.max(size, DefaultInitialSize)) + } +} + +trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] { + protected def array: Array[AnyRef] + + final override def clone(): C = klone() + + protected def klone(): C + + protected def ofArray(array: Array[AnyRef], end: Int): C + + protected def start_+(idx: Int): Int + + @inline protected final def requireBounds(idx: Int, until: Int = length): Unit = + if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})") + + /** + * This is a more general version of copyToArray - this also accepts a srcStart unlike copyToArray + * This copies maxItems elements from this collections srcStart to dest's destStart + * If we reach the end of either collections before we could copy maxItems, we simply stop copying + * + * @param dest + * @param srcStart + * @param destStart + * @param maxItems + */ + def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = { + requireBounds(destStart, dest.length+1) + val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart)) + if (toCopy > 0) { + requireBounds(srcStart) + val startIdx = start_+(srcStart) + val block1 = Math.min(toCopy, array.length - startIdx) + Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1) + val block2 = toCopy - block1 + if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2) + } + dest + } + + override def reverse: C = { + val n = length + val arr = ArrayDeque.alloc(n) + var i = 0 + while(i < n) { + arr(i) = this(n - i - 1).asInstanceOf[AnyRef] + i += 1 + } + ofArray(arr, n) + } + + override def slice(from: Int, until: Int): C = { + val n = length + val left = Math.max(0, 
Math.min(n, from)) + val right = Math.max(0, Math.min(n, until)) + val len = right - left + if (len <= 0) { + empty + } else if (len >= n) { + klone() + } else { + val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len) + ofArray(array2, len) + } + } + + override def sliding(window: Int, step: Int): Iterator[C] = { + require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive") + length match { + case 0 => Iterator.empty + case n if n <= window => Iterator.single(slice(0, length)) + case n => + val lag = if (window > step) window - step else 0 + Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window)) + } + } + + override def grouped(n: Int): Iterator[C] = sliding(n, n) +} diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..74ab6b2107e5 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -0,0 +1,347 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.util.Arrays + +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. 
+ * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def array: Array[_] + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. 
*/ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) + } + + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this + } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. + private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. 
The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. + */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => + Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + def elemTag = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, 
elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + def elemTag = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + def elemTag = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, 
that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + def elemTag = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + def elemTag = ClassTag.Long + def length: Int = array.length + def 
apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + def elemTag = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + def elemTag = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = 
that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + def elemTag = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + def elemTag = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new 
ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit] + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala new file mode 100644 index 000000000000..69ecc122c1f9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -0,0 +1,392 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.collection.immutable.Range +import BitSetOps.{LogWL, MaxSize} +import scala.annotation.implicitNotFound + +/** + * A class for mutable bitsets. + * + * $bitsetinfo + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] + * section on `Mutable Bitsets` for more information. 
+ * + * @define Coll `BitSet` + * @define coll bitset + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class BitSet(protected[collection] final var elems: Array[Long]) + extends AbstractSet[Int] + with SortedSet[Int] + with SortedSetOps[Int, SortedSet, BitSet] + with StrictOptimizedIterableOps[Int, Set, BitSet] + with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet] + with collection.BitSet + with collection.BitSetOps[BitSet] + with Serializable { + + def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1))) + + def this() = this(0) + + override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder + override def empty: BitSet = bitSetFactory.empty + + def bitSetFactory = BitSet + + override def unsorted: Set[Int] = this + + protected[collection] final def nwords: Int = elems.length + + protected[collection] final def word(idx: Int): Long = + if (idx < nwords) elems(idx) else 0L + + protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = + if (elems.length == 0) empty + else new BitSet(elems) + + def addOne(elem: Int): this.type = { + require(elem >= 0) + if (!contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) | (1L << elem)) + } + this + } + + def subtractOne(elem: Int): this.type = { + require(elem >= 0) + if (contains(elem)) { + val idx = elem >> LogWL + updateWord(idx, word(idx) & ~(1L << elem)) + } + this + } + + def clear(): Unit = { + elems = new Array[Long](elems.length) + } + + protected final def updateWord(idx: Int, w: Long): Unit = { + ensureCapacity(idx) + elems(idx) = w + } + + protected final def ensureCapacity(idx: Int): Unit = { + require(idx < MaxSize) + if (idx >= nwords) { + var newlen = nwords + while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize) + val elems1 = 
new Array[Long](newlen) + Array.copy(elems, 0, elems1, 0, nwords) + elems = elems1 + } + } + + def unconstrained: collection.Set[Int] = this + + /** Updates this bitset to the union with another bitset by performing a bitwise "or". + * + * @param other the bitset to form the union with. + * @return the bitset itself. + */ + def |= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + elems(i) = elems(i) | other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the intersection with another bitset by performing a bitwise "and". + * + * @param other the bitset to form the intersection with. + * @return the bitset itself. + */ + def &= (other: collection.BitSet): this.type = { + // Different from other operations: no need to ensure capacity because + // anything beyond the capacity is 0. Since we use other.word which is 0 + // off the end, we also don't need to make sure we stay in bounds there. + var i = 0 + val thisnwords = nwords + while (i < thisnwords) { + elems(i) = elems(i) & other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor". + * + * @param other the bitset to form the symmetric difference with. + * @return the bitset itself. + */ + def ^= (other: collection.BitSet): this.type = { + ensureCapacity(other.nwords - 1) + var i = 0 + val othernwords = other.nwords + while (i < othernwords) { + + elems(i) = elems(i) ^ other.word(i) + i += 1 + } + this + } + /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not". + * + * @param other the bitset to form the difference with. + * @return the bitset itself. 
+ */ + def &~= (other: collection.BitSet): this.type = { + var i = 0 + val max = Math.min(nwords, other.nwords) + while (i < max) { + elems(i) = elems(i) & ~other.word(i) + i += 1 + } + this + } + + override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length)) + + def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems) + + override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f) + override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].map(f) + + override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f) + override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].flatMap(f) + + override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf) + override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] = + super[StrictOptimizedSortedSetOps].collect(pf) + + // necessary for disambiguation + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = + super.zip(that) + + override def addAll(xs: IterableOnce[Int]): this.type = xs match { + case bs: collection.BitSet => + this |= bs + case range: Range => + if (range.nonEmpty) { + val start = range.min + if (start >= 0) { + val end = range.max + val endIdx = end >> LogWL + ensureCapacity(endIdx) + + if (range.step == 1 || range.step == -1) { + val startIdx = start >> LogWL + val wordStart = startIdx * BitSetOps.WordLength + val wordMask = -1L << (start - wordStart) + + if (endIdx > startIdx) { + elems(startIdx) |= wordMask + java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L) + 
elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1) + } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1))) + } else super.addAll(range) + } else super.addAll(range) + } + this + + case sorted: collection.SortedSet[Int] => + // if `sorted` is using the regular Int ordering, ensure capacity for the largest + // element up front to avoid multiple resizing allocations + if (sorted.nonEmpty) { + val ord = sorted.ordering + if (ord eq Ordering.Int) { + ensureCapacity(sorted.lastKey >> LogWL) + } else if (ord eq Ordering.Int.reverse) { + ensureCapacity(sorted.firstKey >> LogWL) + } + val iter = sorted.iterator + while (iter.hasNext) { + addOne(iter.next()) + } + } + + this + + case other => + super.addAll(other) + } + + override def subsetOf(that: collection.Set[Int]): Boolean = that match { + case bs: collection.BitSet => + val thisnwords = this.nwords + val bsnwords = bs.nwords + val minWords = Math.min(thisnwords, bsnwords) + + // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there + var i = bsnwords + while (i < thisnwords) { + if (word(i) != 0L) return false + i += 1 + } + + // the higher range of `this` is all `0`s, fall back to lower range + var j = 0 + while (j < minWords) { + if ((word(j) & ~bs.word(j)) != 0L) return false + j += 1 + } + + true + case other => + super.subsetOf(other) + } + + override def subtractAll(xs: IterableOnce[Int]): this.type = xs match { + case bs: collection.BitSet => this &~= bs + case other => super.subtractAll(other) + } + + protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this) + + override def diff(that: collection.Set[Int]): BitSet = that match { + case bs: collection.BitSet => + /* + * Algorithm: + * + * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with + * the fewer words. 
+ * + * Array Shrinking: + * If `this` is not longer than `bs`, then since we must iterate through the full array of words, + * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new + * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1` + */ + + val bsnwords = bs.nwords + val thisnwords = nwords + if (bsnwords >= thisnwords) { + // here, we may have opportunity to shrink the size of the array + // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length + var i = thisnwords - 1 + var currentWord = 0L + + while (i >= 0 && currentWord == 0L) { + val oldWord = word(i) + currentWord = oldWord & ~bs.word(i) + i -= 1 + } + + if (i < 0) { + fromBitMaskNoCopy(Array(currentWord)) + } else { + val minimumNonZeroIndex: Int = i + 1 + val newArray = elems.take(minimumNonZeroIndex + 1) + newArray(i + 1) = currentWord + while (i >= 0) { + newArray(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newArray) + } + } else { + // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index + val newElems = elems.clone() + var i = bsnwords - 1 + while (i >= 0) { + newElems(i) = word(i) & ~bs.word(i) + i -= 1 + } + fromBitMaskNoCopy(newElems) + } + case _ => super.diff(that) + } + + override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = { + // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word + // index which lets us avoid: + // * over-allocating -- the resulting array will be exactly the right size + // * multiple resizing allocations -- the array is allocated one time, not log(n) times. 
+ var i = nwords - 1 + var newArray: Array[Long] = null + while (i >= 0) { + val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i) + if (w != 0L) { + if (newArray eq null) { + newArray = new Array(i + 1) + } + newArray(i) = w + } + i -= 1 + } + if (newArray eq null) { + empty + } else { + fromBitMaskNoCopy(newArray) + } + } + + override def filterInPlace(p: Int => Boolean): this.type = { + val thisnwords = nwords + var i = 0 + while (i < thisnwords) { + elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i) + i += 1 + } + this + } + + override def toBitMask: Array[Long] = elems.clone() +} + +@SerialVersionUID(3L) +object BitSet extends SpecificIterableFactory[Int, BitSet] { + + def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) + + def empty: BitSet = new BitSet() + + def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty) + + /** A bitset containing all the bits in an array */ + def fromBitMask(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else { + val a = java.util.Arrays.copyOf(elems, len) + new BitSet(a) + } + } + + /** A bitset containing all the bits in an array, wrapping the existing + * array without copying. 
+ */ + def fromBitMaskNoCopy(elems: Array[Long]): BitSet = { + val len = elems.length + if (len == 0) empty + else new BitSet(elems) + } + + @SerialVersionUID(3L) + private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) { + protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 0a70c75bac0c..847b924735ce 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -14,7 +14,6 @@ package scala.collection package mutable import scala.annotation.nowarn -import language.experimental.captureChecking /** A `Buffer` is a growable and shrinkable `Seq`. */ @@ -49,19 +48,19 @@ trait Buffer[A] /** Appends the elements contained in a iterable object to this buffer. * @param xs the iterable object containing the elements to append. */ - @`inline` final def appendAll(xs: IterableOnce[A]^): this.type = addAll(xs) + @`inline` final def appendAll(xs: IterableOnce[A]): this.type = addAll(xs) /** Alias for `prepend` */ @`inline` final def +=: (elem: A): this.type = prepend(elem) - def prependAll(elems: IterableOnce[A]^): this.type = { insertAll(0, elems); this } + def prependAll(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } @deprecated("Use prependAll instead", "2.13.0") @`inline` final def prepend(elems: A*): this.type = prependAll(elems) /** Alias for `prependAll` */ - @inline final def ++=:(elems: IterableOnce[A]^): this.type = prependAll(elems) + @inline final def ++=:(elems: IterableOnce[A]): this.type = prependAll(elems) /** Inserts a new element at a given index into this buffer. * @@ -82,7 +81,7 @@ trait Buffer[A] * @throws IndexOutOfBoundsException if `idx` is out of bounds. 
*/ @throws[IndexOutOfBoundsException] - def insertAll(idx: Int, elems: IterableOnce[A]^): Unit + def insertAll(idx: Int, elems: IterableOnce[A]): Unit /** Removes the element at a given index position. * @@ -104,7 +103,7 @@ trait Buffer[A] @throws[IndexOutOfBoundsException] @throws[IllegalArgumentException] def remove(idx: Int, count: Int): Unit - + /** Removes a single element from this buffer, at its first occurrence. * If the buffer does not contain that element, it is unchanged. * @@ -133,7 +132,7 @@ trait Buffer[A] @deprecated("use dropRightInPlace instead", since = "2.13.4") def trimEnd(n: Int): Unit = dropRightInPlace(n) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type // +=, ++=, clear inherited from Growable // Per remark of @ichoran, we should preferably not have these: @@ -181,11 +180,11 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { // There's scope for a better implementation which copies elements in place. 
var i = 0 val s = size - val newElems = new Array[IterableOnce[A]^](s) + val newElems = new Array[IterableOnce[A]](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 @@ -208,7 +207,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] if (i == j) this else takeInPlace(j) } - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { val replaced0 = math.min(math.max(replaced, 0), length) val i = math.min(math.max(from, 0), length) var j = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/Builder.scala b/tests/pos-special/stdlib/collection/mutable/Builder.scala index dd57cb75da91..0ecc06dff061 100644 --- a/tests/pos-special/stdlib/collection/mutable/Builder.scala +++ b/tests/pos-special/stdlib/collection/mutable/Builder.scala @@ -12,9 +12,6 @@ package scala.collection.mutable -import language.experimental.captureChecking - - /** Base trait for collection builders. * * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]]) @@ -23,8 +20,7 @@ import language.experimental.captureChecking * * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()` */ -trait Builder[-A, +To] extends Growable[A] { - self: Builder[A, To]^ => +trait Builder[-A, +To] extends Growable[A] { self => /** Clears the contents of this builder. * After execution of this method the builder will contain no elements. @@ -55,7 +51,7 @@ trait Builder[-A, +To] extends Growable[A] { * @param coll the collection which serves as a hint for the result's size. * @param delta a correction to add to the `coll.size` to produce the size hint. 
*/ - final def sizeHint(coll: scala.collection.IterableOnce[_]^, delta: Int = 0): Unit = { + final def sizeHint(coll: scala.collection.IterableOnce[_], delta: Int = 0): Unit = { val s = coll.knownSize if (s != -1) sizeHint(s + delta) } @@ -73,7 +69,7 @@ trait Builder[-A, +To] extends Growable[A] { * than collection's size are reduced. */ // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility - final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]^): Unit = { + final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { val s = boundingColl.knownSize if (s != -1) { sizeHint(scala.math.min(s, size)) @@ -81,10 +77,10 @@ trait Builder[-A, +To] extends Growable[A] { } /** A builder resulting from this builder my mapping the result using `f`. */ - def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { + def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo] = new Builder[A, NewTo] { def addOne(x: A): this.type = { self += x; this } def clear(): Unit = self.clear() - override def addAll(xs: IterableOnce[A]^): this.type = { self ++= xs; this } + override def addAll(xs: IterableOnce[A]): this.type = { self ++= xs; this } override def sizeHint(size: Int): Unit = self.sizeHint(size) def result(): NewTo = f(self.result()) override def knownSize: Int = self.knownSize diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..b9598904375d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,117 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + protected val mutationCount: () => Int + + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, 
suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends 
IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala new file mode 100644 index 000000000000..940ecf3549ad --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala @@ -0,0 +1,22 @@ +/* + * Scala (https://www.scala-lang.org) + * 
+ * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + + +/** A trait for cloneable collections. + * + * @tparam C Type of the collection, covariant and with reference types as upperbound. + */ +trait Cloneable[+C <: AnyRef] extends scala.Cloneable { + override def clone(): C = super.clone().asInstanceOf[C] +} diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..8542b5b56a01 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,888 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). 
+ * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, 
value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def 
remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = 
(node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), 
lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 
2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = 
if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root 
= transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) 
y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo 
+ * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + } + + @unused @`inline` private def compare[K, 
V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. + sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq null) node 
else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Growable.scala 
b/tests/pos-special/stdlib/collection/mutable/Growable.scala index 3b5eabac37bf..914742b9013a 100644 --- a/tests/pos-special/stdlib/collection/mutable/Growable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Growable.scala @@ -14,8 +14,6 @@ package scala package collection package mutable -import language.experimental.captureChecking - /** This trait forms part of collections that can be augmented * using a `+=` operator and that can be cleared of all elements using * a `clear` method. @@ -56,7 +54,7 @@ trait Growable[-A] extends Clearable { * @param xs the IterableOnce producing the elements to $add. * @return the $coll itself. */ - def addAll(xs: IterableOnce[A]^): this.type = { + def addAll(xs: IterableOnce[A]): this.type = { if (xs.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(xs)) // avoid mutating under our own iterator else { val it = xs.iterator @@ -68,7 +66,7 @@ trait Growable[-A] extends Clearable { } /** Alias for `addAll` */ - @`inline` final def ++= (xs: IterableOnce[A]^): this.type = addAll(xs) + @`inline` final def ++= (xs: IterableOnce[A]): this.type = addAll(xs) /** @return The number of elements in the collection under construction, if it can be cheaply computed, * -1 otherwise. The default implementation always returns -1. @@ -85,7 +83,7 @@ object Growable { * @tparam A Element type * @return The filled instance */ - def from[A](empty: Growable[A], it: collection.IterableOnce[A]^): empty.type = empty ++= it + def from[A](empty: Growable[A], it: collection.IterableOnce[A]): empty.type = empty ++= it } diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala new file mode 100644 index 000000000000..7e945dffb99e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + + +/** The canonical builder for collections that are growable, i.e. that support an + * efficient `+=` method which adds an element to the collection. + * + * GrowableBuilders can produce only a single instance of the collection they are growing. + * + * @define Coll `GrowingBuilder` + * @define coll growing builder + */ +class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) + extends Builder[Elem, To] { + + def clear(): Unit = elems.clear() + + def result(): To = elems + + def addOne(elem: Elem): this.type = { elems += elem; this } + + override def addAll(xs: IterableOnce[Elem]): this.type = { elems.addAll(xs); this } + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala new file mode 100644 index 000000000000..7ad3cf3869e8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala @@ -0,0 +1,654 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 + +/** This class implements mutable maps using a hashtable. 
+ * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. + * + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") +class HashMap[K, V](initialCapacity: Int, loadFactor: Double) + extends AbstractMap[K, V] + with MapOps[K, V, HashMap, HashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] + with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]] + with MapFactoryDefaults[K, V, HashMap, Iterable] + with Serializable { + + /* The HashMap class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor) + + import HashMap.Node + + /** The actual hash table. */ + private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. 
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + // + // This function is also its own inverse. That is, for all ints i, improveHash(improveHash(i)) = i + // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap + // and that is why unimproveHash simply forwards to this method + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(key: K): Boolean = findNode(key) ne null + + @`inline` private[this] def findNode(key: K): Node[K, V] = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + sizeHint(xs.knownSize) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case lhm: mutable.LinkedHashMap[K, V] => + val iter = lhm.entryIterator + while (iter.hasNext) { + val entry = iter.next() + put0(entry.key, entry.value, entry.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, 
improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } + } + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node[K, V] = null + var previousNode: Node[K, V] = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } + + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue + } + } + + override def subtractAll(xs: IterableOnce[K]): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[K] => + hs.foreachWithHashWhile { (k, h) => + remove0(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[K] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + 
remove0(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[K] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove0(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = new Node[K, V](key, hash, value, null) + case old => + var prev: Node[K, V] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + if(prev eq null) table(idx) = new Node(key, hash, value, old) + else prev.next = new Node(key, hash, value, prev.next) + } + contentSize += 1 + null + } + + private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, hash: Int) : 
Node[K, V] = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] { + private[this] var i = 0 + private[this] var node: Node[K, V] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[K, V]): A + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } + } + + def next(): A = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[(K, V)] = + if(size == 0) Iterator.empty + else new HashMapIterator[(K, V)] { + protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value) + } + + override def keysIterator: Iterator[K] = + if(size == 0) Iterator.empty + else new HashMapIterator[K] { + protected[this] def extract(nd: Node[K, V]) = nd.key + } + + override def valuesIterator: Iterator[V] = + if(size == 0) Iterator.empty + else new HashMapIterator[V] { + protected[this] def extract(nd: Node[K, V]) = nd.value + } + + + /** Returns an iterator over the nodes stored in this HashMap */ + private[collection] def nodeIterator: Iterator[Node[K, V]] = + if(size == 0) Iterator.empty + else new HashMapIterator[Node[K, V]] { + protected[this] def extract(nd: Node[K, V]) = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape. 
+ parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)). + asInstanceOf[S with EfficientSplit] + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, 
null.asInstanceOf[V], null) + val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[K, V] = preLow + var lastHigh: Node[K, V] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd.value) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd.value + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. but in the common case, we can avoid the Option boxing. + val nd = findNode(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[HashMap[_, _]]) { + // subclasses of HashMap might customise `get` ... 
+ super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findNode(key, hash) + } + if(nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def remove(key: K): Option[V] = remove0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreachEntry(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def filterInPlace(p: (K, V) => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key, head.value)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + 
if (p(next.key, next.value)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible) + private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val len = table.length + var i = 0 + while (i < len) { + var n = table(i) + while (n ne null) { + n.value = f(n.key, n.value) + n = n.next + } + i += 1 + } + this + } + + override def mapFactory: MapFactory[HashMap] = HashMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "HashMap" + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new HashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[K, V]): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashMap` + * @define coll mutable hash map + */ +@SerialVersionUID(3L) +object HashMap extends MapFactory[HashMap] { + + def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]): HashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** 
The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) { + def key: K = _key + def hash: Int = _hash + def value: V = _value + def value_= (v: V): Unit = _value = v + def next: Node[K, V] = _next + def next_= (n: Node[K, V]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K, V] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if(_next ne null) _next.foreach(f) + } + + @tailrec + def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if(_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala new file mode 100644 index 000000000000..425721a41626 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -0,0 +1,456 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 + +/** This class implements mutable sets using a hashtable. + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. + * + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class HashSet[A](initialCapacity: Int, loadFactor: Double) + extends AbstractSet[A] + with SetOps[A, HashSet, HashSet[A]] + with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] + with IterableFactoryDefaults[A, HashSet] + with Serializable { + + def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor) + + import HashSet.Node + + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendent hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. */ + private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). */ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. 
*/ + private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def contains(elem: A): Boolean = findNode(elem) ne null + + @`inline` private[this] def findNode(elem: A): Node[A] = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } + + override def addAll(xs: IterableOnce[A]): this.type = { + sizeHint(xs.knownSize) + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHash((k, h) => addElem(k, improveHash(h))) + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } + + override def subtractAll(xs: IterableOnce[A]): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + 
while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case lhs: mutable.LinkedHashSet[A] => + val iter = lhs.entryIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } + } + + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + table(idx) = new Node(elem, hash, null) + case old => + var prev: Node[A] = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + if(prev eq null) + table(idx) = new Node(elem, hash, old) + else + prev.next = new Node(elem, hash, prev.next) + } + contentSize += 1 + true + } + + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) + + private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] { + private[this] var i = 0 + private[this] var node: Node[A] = null + private[this] val len = table.length + + protected[this] def extract(nd: Node[A]): B + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + if(n ne null) { node = n; return true } + } + false + } 
+ } + + def next(): B = + if(!hasNext) Iterator.empty.next() + else { + val r = extract(node) + node = node.next + r + } + } + + override def iterator: Iterator[A] = new HashSetIterator[A] { + override protected[this] def extract(nd: Node[A]): A = nd.key + } + + /** Returns an iterator over the nodes stored in this HashSet */ + private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] { + override protected[this] def extract(nd: Node[A]): Node[A] = nd + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length) + case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length) + case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length) + case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length)) + } + s.asInstanceOf[S with EfficientSplit] + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null) + val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow: Node[A] = preLow + var lastHigh: Node[A] = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + var bucket = 0 + + while (bucket < table.length) { + var head = table(bucket) + + while ((head ne null) && !p(head.key)) { + head = head.next + contentSize -= 1 + } + + if (head ne null) { + var prev = head + var next = head.next + + while (next ne null) { + if (p(next.key)) { + prev = next + } else { + prev.next = next.next + contentSize -= 1 + } + next = next.next + } + } + + table(bucket) = head + bucket += 1 + } + } + this + } + + /* + private[mutable] def checkTable(): Unit = { + var i = 0 + var count = 0 + var prev: Node[A] = null + while(i < table.length) { + var n = table(i) + prev = null + while(n != null) { + count += 1 + assert(index(n.hash) == i) + if(prev ne null) assert(prev.hash <= n.hash) + prev = n + n = n.next + } + i += 1 + } + assert(contentSize == count) + } + */ + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def iterableFactory: IterableFactory[HashSet] = HashSet + + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): 
this.type = { remove(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: A => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n.foreach(f) + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this) + + override protected[this] def className = "HashSet" + + override def hashCode: Int = { + val setIterator = this.iterator + val hashIterator: Iterator[Any] = + if (setIterator.isEmpty) setIterator + else new HashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node[A]): Any = { + hash = unimproveHash(nd.hash) + this + } + } + MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed) + } +} + +/** + * $factoryInfo + * @define Coll `mutable.HashSet` + * @define coll mutable hash set + */ +@SerialVersionUID(3L) +object HashSet extends IterableFactory[HashSet] { + + def from[B](it: scala.collection.IterableOnce[B]): HashSet[B] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new HashSet[B](cap, defaultLoadFactor) ++= it + } + + def empty[A]: HashSet[A] = new HashSet[A] + + def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[A](val tableLength: 
Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) { + def key: K = _key + def hash: Int = _hash + def next: Node[K] = _next + def next_= (n: Node[K]): Unit = _next = n + + @tailrec + def findNode(k: K, h: Int): Node[K] = + if(h == _hash && k == _key) this + else if((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + def foreach[U](f: K => U): Unit = { + f(_key) + if(_next ne null) _next.foreach(f) + } + + override def toString = s"Node($key, $hash) -> $next" + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala new file mode 100644 index 000000000000..4153bd532163 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -0,0 +1,417 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import collection.{AbstractIterator, Iterator} + +import java.lang.Integer.{numberOfLeadingZeros, rotateRight} +import scala.util.hashing.byteswap32 + +import java.lang.Integer + +/** This class can be used to construct data structures that are based + * on hashtables. Class `HashTable[A]` implements a hashtable + * that maps keys of type `A` to values of the fully abstract + * member type `Entry`. Classes that make use of `HashTable` + * have to provide an implementation for `Entry`. 
+ * + * There are mainly two parameters that affect the performance of a hashtable: + * the initial size and the load factor. The size + * refers to the number of buckets in the hashtable, and the load + * factor is a measure of how full the hashtable is allowed to get before + * its size is automatically doubled. Both parameters may be changed by + * overriding the corresponding values in class `HashTable`. + * + * @tparam A type of the elements contained in this hash table. + */ +// Not used in the standard library, but used in scala-parallel-collections +private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { + // Replacing Entry type parameter by abstract type member here allows to not expose to public + // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. + // However, I'm afraid it's too late now for such breaking change. + import HashTable._ + + protected var _loadFactor = defaultLoadFactor + + /** The actual hash table. + */ + protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity) + + /** The number of mappings contained in this hash table. + */ + protected[collection] var tableSize: Int = 0 + + final def size: Int = tableSize + + /** The next size value at which to resize (capacity * load factor). + */ + protected[collection] var threshold: Int = initialThreshold(_loadFactor) + + /** The array keeping track of the number of elements in 32 element blocks. + */ + protected var sizemap: Array[Int] = null + + protected var seedvalue: Int = tableSizeSeed + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + + /** The initial size of the hash table. + */ + protected def initialSize: Int = 16 + + /** The initial threshold. 
+ */ + private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity) + + private def initialCapacity = capacity(initialSize) + + private def lastPopulatedIndex = { + var idx = table.length - 1 + while (table(idx) == null && idx > 0) + idx -= 1 + + idx + } + + /** + * Initializes the collection from the input stream. `readEntry` will be called for each + * entry to be read from the input stream. + */ + private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = { + _loadFactor = in.readInt() + assert(_loadFactor > 0) + + val size = in.readInt() + tableSize = 0 + assert(size >= 0) + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() + + table = new Array(capacity(sizeForThreshold(_loadFactor, size))) + threshold = newThreshold(_loadFactor, table.length) + + if (smDefined) sizeMapInit(table.length) else sizemap = null + + var index = 0 + while (index < size) { + addEntry(readEntry) + index += 1 + } + } + + /** + * Serializes the collection to the output stream by saving the load factor, collection + * size and collection entries. `writeEntry` is responsible for writing an entry to the stream. + * + * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To + * deserialize, `init` should be used. + */ + private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = { + out.writeInt(_loadFactor) + out.writeInt(tableSize) + out.writeInt(seedvalue) + out.writeBoolean(isSizeMapDefined) + + foreachEntry(writeEntry) + } + + /** Find entry with given key in table, null if not found. 
+ */ + final def findEntry(key: A): Entry = + findEntry0(key, index(elemHashCode(key))) + + protected[collection] final def findEntry0(key: A, h: Int): Entry = { + var e = table(h).asInstanceOf[Entry] + while (e != null && !elemEquals(e.key, key)) e = e.next + e + } + + /** Add entry to table + * pre: no entry with same key exists + */ + protected[collection] final def addEntry(e: Entry): Unit = { + addEntry0(e, index(elemHashCode(e.key))) + } + + protected[collection] final def addEntry0(e: Entry, h: Int): Unit = { + e.next = table(h).asInstanceOf[Entry] + table(h) = e + tableSize = tableSize + 1 + nnSizeMapAdd(h) + if (tableSize > threshold) + resize(2 * table.length) + } + + /** Find entry with given key in table, or add new one if not found. + * May be somewhat faster then `findEntry`/`addEntry` pair as it + * computes entry's hash index only once. + * Returns entry found in table or null. + * New entries are created by calling `createNewEntry` method. + */ + def findOrAddEntry(key: A, value: B): Entry = { + val h = index(elemHashCode(key)) + val e = findEntry0(key, h) + if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null } + } + + /** Creates new entry to be immediately inserted into the hashtable. + * This method is guaranteed to be called only once and in case that the entry + * will be added. In other words, an implementation may be side-effecting. + */ + def createNewEntry(key: A, value: B): Entry + + /** Remove entry from table if present. + */ + final def removeEntry(key: A) : Entry = { + removeEntry0(key, index(elemHashCode(key))) + } + /** Remove entry from table if present. 
+ */ + private[collection] final def removeEntry0(key: A, h: Int) : Entry = { + var e = table(h).asInstanceOf[Entry] + if (e != null) { + if (elemEquals(e.key, key)) { + table(h) = e.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e.next = null + return e + } else { + var e1 = e.next + while (e1 != null && !elemEquals(e1.key, key)) { + e = e1 + e1 = e1.next + } + if (e1 != null) { + e.next = e1.next + tableSize = tableSize - 1 + nnSizeMapRemove(h) + e1.next = null + return e1 + } + } + } + null + } + + /** An iterator returning all entries. + */ + def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + def hasNext = es != null + def next() = { + val res = es + es = es.next + while (es == null && idx > 0) { + idx = idx - 1 + es = iterTable(idx) + } + res.asInstanceOf[Entry] + } + } + + /** Avoid iterator for a 2x faster traversal. */ + def foreachEntry[U](f: Entry => U): Unit = { + val iterTable = table + var idx = lastPopulatedIndex + var es = iterTable(idx) + + while (es != null) { + val next = es.next // Cache next in case f removes es. 
+ f(es.asInstanceOf[Entry]) + es = next + + while (es == null && idx > 0) { + idx -= 1 + es = iterTable(idx) + } + } + } + + /** Remove all entries from table + */ + def clearTable(): Unit = { + var i = table.length - 1 + while (i >= 0) { table(i) = null; i = i - 1 } + tableSize = 0 + nnSizeMapReset(0) + } + + private def resize(newSize: Int): Unit = { + val oldTable = table + table = new Array(newSize) + nnSizeMapReset(table.length) + var i = oldTable.length - 1 + while (i >= 0) { + var e = oldTable(i) + while (e != null) { + val h = index(elemHashCode(e.key)) + val e1 = e.next + e.next = table(h).asInstanceOf[Entry] + table(h) = e + e = e1 + nnSizeMapAdd(h) + } + i = i - 1 + } + threshold = newThreshold(_loadFactor, newSize) + } + + /* Size map handling code */ + + /* + * The following three sizeMap* functions (Add, Remove, Reset) + * are used to update the size map of the hash table. + * + * The size map logically divides the hash table into `sizeMapBucketSize` element buckets + * by keeping an integer entry for each such bucket. Each integer entry simply denotes + * the number of elements in the corresponding bucket. + * Best understood through an example, see: + * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries) + * sizemap = [ 2 | 3 ] (2 entries) + * where sizeMapBucketSize == 4. + * + * By default the size map is not initialized, so these methods don't do anything, thus, + * their impact on hash table performance is negligible. However, if the hash table + * is converted into a parallel hash table, the size map is initialized, as it will be needed + * there. 
+ */ + protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) += 1 + } + + protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) { + sizemap(h >> sizeMapBucketBitSize) -= 1 + } + + protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) { + val nsize = calcSizeMapSize(tableLength) + if (sizemap.length != nsize) sizemap = new Array[Int](nsize) + else java.util.Arrays.fill(sizemap, 0) + } + + private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) 1 else table.length / sizeMapBucketSize + + protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1 + + // discards the previous sizemap and only allocates a new one + protected def sizeMapInit(tableLength: Int): Unit = { + sizemap = new Array[Int](calcSizeMapSize(tableLength)) + } + + // discards the previous sizemap and populates the new one + protected final def sizeMapInitAndRebuild() = { + sizeMapInit(table.length) + + // go through the buckets, count elements + var tableidx = 0 + var bucketidx = 0 + val tbl = table + var tableuntil = 0 + if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize + val totalbuckets = totalSizeMapBuckets + while (bucketidx < totalbuckets) { + var currbucketsize = 0 + while (tableidx < tableuntil) { + var e = tbl(tableidx) + while (e ne null) { + currbucketsize += 1 + e = e.next + } + tableidx += 1 + } + sizemap(bucketidx) = currbucketsize + tableuntil += sizeMapBucketSize + bucketidx += 1 + } + } + + private[collection] def printSizeMap() = { + println(sizemap.to(collection.immutable.List)) + } + + protected final def sizeMapDisable() = sizemap = null + + protected final def isSizeMapDefined = sizemap ne null + + // override to automatically initialize the size map + protected def alwaysInitSizeMap = false + + /* End of size map handling code */ + + protected def elemEquals(key1: A, key2: A): 
Boolean = (key1 == key2) + + /** + * Note: we take the most significant bits of the hashcode, not the lower ones + * this is of crucial importance when populating the table in parallel + */ + protected[collection] final def index(hcode: Int): Int = { + val ones = table.length - 1 + val exponent = Integer.numberOfLeadingZeros(ones) + (improve(hcode, seedvalue) >>> exponent) & ones + } +} + +private[collection] object HashTable { + /** The load factor for the hash table (in 0.001 step). + */ + private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75% + private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible + + private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt + + private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt + + private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize) + + trait HashUtils[KeyType] { + protected final def sizeMapBucketBitSize = 5 + // so that: + protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize + + protected[collection] def elemHashCode(key: KeyType) = key.## + + /** + * Defer to a high-quality hash in [[scala.util.hashing]]. + * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits. + *

+ * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003 + * {{{ + * var h: Int = hcode + ~(hcode << 9) + * h = h ^ (h >>> 14) + * h = h + (h << 4) + * h ^ (h >>> 10) + * }}} + * the rest of the computation is due to SI-5293 + */ + protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed) + } + + /** + * Returns a power of two >= `target`. + */ + private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** Class used internally. + */ +private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { + val key: A + var next: E = _ +} diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala new file mode 100644 index 000000000000..c801f073fb0d --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -0,0 +1,31 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + + +/** + * Reusable builder for immutable collections + */ +abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C) + extends ReusableBuilder[A, C] { + + protected var elems: C = empty + + def clear(): Unit = { elems = empty } + + def result(): C = elems + + override def knownSize: Int = elems.knownSize +} diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala new file mode 100644 index 000000000000..24d54905de22 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -0,0 +1,83 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +trait IndexedSeq[T] extends Seq[T] + with scala.collection.IndexedSeq[T] + with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]] + with IterableFactoryDefaults[T, IndexedSeq] { + + override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq +} + +@SerialVersionUID(3L) +object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer) + +trait IndexedSeqOps[A, +CC[_], +C <: AnyRef] + extends scala.collection.IndexedSeqOps[A, CC, C] + with SeqOps[A, CC, C] { + + /** Modifies this $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return this $coll modified by replacing all elements with the + * result of applying the given function `f` to each element + * of this $coll. + */ + def mapInPlace(f: A => A): this.type = { + var i = 0 + val siz = size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + /** Sorts this $coll in place according to an Ordering. 
+ * + * @see [[scala.collection.SeqOps.sorted]] + * @param ord the ordering to be used to compare elements. + * @return modified input $coll sorted according to the ordering `ord`. + */ + def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { + val len = this.length + if (len > 1) { + val arr = new Array[AnyRef](len) + var i = 0 + for (x <- this) { + arr(i) = x.asInstanceOf[AnyRef] + i += 1 + } + java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]]) + i = 0 + while (i < arr.length) { + update(i, arr(i).asInstanceOf[A]) + i += 1 + } + } + this + } + + /** Sorts this $coll in place according to a comparison function. + * + * @see [[scala.collection.SeqOps.sortWith]] + */ + def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt)) + + /** Sorts this $coll in place according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.collection.SeqOps.sortBy]] + */ + def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/Iterable.scala b/tests/pos-special/stdlib/collection/mutable/Iterable.scala index bf286157b376..d05aeed88044 100644 --- a/tests/pos-special/stdlib/collection/mutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Iterable.scala @@ -13,13 +13,11 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults} -import language.experimental.captureChecking trait Iterable[A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { - this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @@ -33,5 +31,4 @@ trait Iterable[A] object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) /** Explicit instantiation of the `Iterable` trait to reduce class 
file size in subclasses. */ -abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]: - this: AbstractIterable[A]^ => +abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala new file mode 100644 index 000000000000..bc663f1d37d8 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -0,0 +1,509 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 + + +/** This class implements mutable maps using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam K the type of the keys contained in this hash map. + * @tparam V the type of the values assigned to keys in this hash map. 
+ * + * @define Coll `LinkedHashMap` + * @define coll linked hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") +class LinkedHashMap[K, V] + extends AbstractMap[K, V] + with SeqMap[K, V] + with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] + with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] + with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap + + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + + private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] + + private[collection] def _firstEntry: Entry = firstEntry + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. 
+ */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: (K, V) = + if (size > 0) (lastEntry.key, lastEntry.value) + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") + + override def lastOption: Option[(K, V)] = + if (size > 0) Some((lastEntry.key, lastEntry.value)) + else None + + override def head: (K, V) = + if (size > 0) (firstEntry.key, firstEntry.value) + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") + + override def headOption: Option[(K, V)] = + if (size > 0) Some((firstEntry.key, firstEntry.value)) + else None + + override def size = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def get(key: K): Option[V] = { + val e = findEntry(key) + if (e == null) None + else Some(e.value) + } + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def contains(key: K): Boolean = { + if (getClass eq classOf[LinkedHashMap[_, _]]) + findEntry(key) != null + else + super.contains(key) // A subclass might override `get`, use the default implementation `contains`. + } + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + override def update(key: K, value: V): Unit = put0(key, value, false) + + override def remove(key: K): Option[V] = removeEntry0(key) match { + case null => None + case nd => Some(nd.value) + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElse(key, default) + } else { + // .. 
but in the common case, we can avoid the Option boxing. + val nd = findEntry(key) + if (nd eq null) default else nd.value + } + } + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + if (getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.getOrElseUpdate(key, defaultValue) + } else { + val hash = computeHash(key) + val idx = index(hash) + val nd = table(idx) match { + case null => null + case nd => nd.findEntry(key, hash) + } + if (nd != null) nd.value + else { + val table0 = table + val default = defaultValue + if (contentSize + 1 >= threshold) growTable(table.length * 2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + } + } + + private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def removeEntry0(elem: K, hash: Int): Entry = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + /** Computes the improved hash of an original (`any.##`) hash. 
*/ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: K): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + def addOne(kv: (K, V)): this.type = { + put(kv._1, kv._2) + this + } + + def subtractOne(key: K): this.type = { + remove(key) + this + } + + private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[(K, V)] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[(K, V)] { + def extract(nd: Entry): (K, V) = (nd.key, nd.value) + } + + protected class LinkedKeySet extends KeySet { + override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet + } + + override def keySet: collection.Set[K] = new LinkedKeySet + + override def keysIterator: Iterator[K] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[K] { + def extract(nd: Entry): K = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[Entry] { + def extract(nd: Entry): Entry = nd + } + + + // Override updateWith for performance, so we can do the update while hashing + // the input key only once and performing one lookup into the hash table + override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + if 
(getClass != classOf[LinkedHashMap[_, _]]) { + // subclasses of LinkedHashMap might customise `get` ... + super.updateWith(key)(remappingFunction) + } else { + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundEntry: Entry = null + var previousEntry: Entry = null + table(indexedHash) match { + case null => + case nd => + @tailrec + def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousEntry = prev + foundEntry = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findEntry(nd, nd.next, k, h) + } + + findEntry(null, nd, key, hash) + } + + val previousValue = foundEntry match { + case null => None + case nd => Some(nd.value) + } + + val nextValue = remappingFunction(previousValue) + + (previousValue, nextValue) match { + case (None, None) => // do nothing + + case (Some(_), None) => + if (previousEntry != null) previousEntry.next = foundEntry.next + else table(indexedHash) = foundEntry.next + deleteEntry(foundEntry) + contentSize -= 1 + + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundEntry.value = newValue + } + nextValue + } + } + + override def valuesIterator: Iterator[V] = + if (size == 0) Iterator.empty + else new LinkedHashMapIterator[V] { + def extract(nd: Entry): V = nd.value + } + + + override def foreach[U](f: ((K, V)) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f((cur.key, cur.value)) + cur = cur.later + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key, cur.value) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def 
tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. + * */ + private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = { + val e = new Entry(key, hash, value) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + table(idx) = createNewEntry(key, hash, value) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if (getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewEntry(key, hash, value) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + null + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var 
oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V]) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new LinkedHashMapIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashMap" +} + +/** $factoryInfo + * @define Coll `LinkedHashMap` + * @define coll linked hash map + */ +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + + def empty[K, V] = new LinkedHashMap[K, V] + + def from[K, V](it: collection.IterableOnce[(K, V)]) = { + val newlhm = empty[K, V] + newlhm.sizeHint(it.knownSize) + newlhm.addAll(it) + newlhm 
+ } + + def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) + + /** Class for the linked hash map entry, used internally. + */ + private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + var earlier: LinkedEntry[K, V] = null + var later: LinkedEntry[K, V] = null + var next: LinkedEntry[K, V] = null + + @tailrec + final def findEntry(k: K, h: Int): LinkedEntry[K, V] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala new file mode 100644 index 000000000000..0c01f8ea79ea --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -0,0 +1,348 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializable +import scala.util.hashing.MurmurHash3 + +/** This class implements mutable sets using a hashtable. + * The iterator and all traversal methods of this class visit elements in the order they were inserted. + * + * @tparam A the type of the elements contained in this set. 
+ * + * @define Coll `LinkedHashSet` + * @define coll linked hash set + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") +class LinkedHashSet[A] + extends AbstractSet[A] + with SetOps[A, LinkedHashSet, LinkedHashSet[A]] + with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] + with IterableFactoryDefaults[A, LinkedHashSet] + with DefaultSerializable { + + override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet + + // stepper is not overridden to use XTableStepper because that stepper would not return the + // elements in insertion order + + /*private*/ type Entry = LinkedHashSet.Entry[A] + + protected var firstEntry: Entry = null + + protected var lastEntry: Entry = null + + /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. 
+ */ + private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize)) + + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def last: A = + if (size > 0) lastEntry.key + else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") + + override def lastOption: Option[A] = + if (size > 0) Some(lastEntry.key) + else None + + override def head: A = + if (size > 0) firstEntry.key + else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") + + override def headOption: Option[A] = + if (size > 0) Some(firstEntry.key) + else None + + override def size: Int = contentSize + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + + def contains(elem: A): Boolean = findEntry(elem) ne null + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt) + if (target > table.length) growTable(target) + } + + override def add(elem: A): Boolean = { + if (contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(elem) + put0(elem, hash, index(hash)) + } + + def addOne(elem: A): this.type = { + add(elem) + this + } + + def subtractOne(elem: A): this.type = { + remove(elem) + this + } + + override def remove(elem: A): Boolean = remove0(elem, computeHash(elem)) + + private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] { + private[this] var cur = firstEntry + def extract(nd: Entry): T + def hasNext: Boolean = cur ne null + def next(): T = + if (hasNext) { val r = extract(cur); cur = cur.later; r } + else Iterator.empty.next() + } + + def iterator: Iterator[A] = new LinkedHashSetIterator[A] { + override def extract(nd: Entry): A = nd.key + } + + private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] { + override def extract(nd: Entry): Entry = nd + } + + override def 
foreach[U](f: A => U): Unit = { + var cur = firstEntry + while (cur ne null) { + f(cur.key) + cur = cur.later + } + } + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstEntry = null + lastEntry = null + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt + + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findEntry(key: A): Entry = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findEntry(key, hash) + } + } + + /*create a new entry. If table is empty(firstEntry is null), then the + * new entry will be the firstEntry. If not, just set the new entry to + * be the lastEntry. 
+ * */ + private[this] def createNewEntry(key: A, hash: Int): Entry = { + val e = new Entry(key, hash) + if (firstEntry eq null) firstEntry = e + else { + lastEntry.later = e + e.earlier = lastEntry + } + lastEntry = e + e + } + + /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */ + private[this] def deleteEntry(e: Entry): Unit = { + if (e.earlier eq null) firstEntry = e.later + else e.earlier.later = e.later + if (e.later eq null) lastEntry = e.earlier + else e.later.earlier = e.earlier + e.earlier = null + e.later = null + e.next = null + } + + private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = { + table(idx) match { + case null => + table(idx) = createNewEntry(elem, hash) + case old => + var prev: Entry = null + var n = old + while ((n ne null) && n.hash <= hash) { + if (n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewEntry(elem, hash) + if (prev eq null) { + nnode.next = old + table(idx) = nnode + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true + } + + private[this] def remove0(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteEntry(nd) + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteEntry(next) + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if (size == 0) table = new Array(newlen) + else { + table = 
java.util.Arrays.copyOf(table, newlen) + val preLow = new Entry(null.asInstanceOf[A], 0) + val preHigh = new Entry(null.asInstanceOf[A], 0) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. + while (oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if (old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while (n ne null) { + val next = n.next + if ((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if (old ne preLow.next) table(i) = preLow.next + if (preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + override def hashCode: Int = { + val setHashIterator = + if (isEmpty) this.iterator + else { + new LinkedHashSetIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override def extract(nd: Entry): Any = { + hash = unimproveHash(nd.hash) + this + } + } + } + MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed) + } + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "LinkedHashSet" +} + +/** $factoryInfo + * @define Coll `LinkedHashSet` + * @define coll linked hash set + */ +@SerialVersionUID(3L) +object LinkedHashSet extends IterableFactory[LinkedHashSet] { + + override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + + def from[E](it: collection.IterableOnce[E]) = { + val newlhs = empty[E] + newlhs.sizeHint(it.knownSize) + newlhs.addAll(it) + newlhs + } + + def newBuilder[A] = new GrowableBuilder(empty[A]) + + /** Class for the linked hash set entry, used internally. 
+ */ + private[mutable] final class Entry[A](val key: A, val hash: Int) { + var earlier: Entry[A] = null + var later: Entry[A] = null + var next: Entry[A] = null + + @tailrec + final def findEntry(k: A, h: Int): Entry[A] = + if (h == hash && k == key) this + else if ((next eq null) || (hash > h)) null + else next.findEntry(k, h) + } + + /** The default load factor for the hash table */ + private[collection] final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 +} + diff --git a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala index 4f607c770130..d66525763163 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala @@ -19,8 +19,6 @@ import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence -import scala.annotation.unchecked.uncheckedCaptures -import language.experimental.captureChecking /** A `Buffer` implementation backed by a list. It provides constant time * prepend and append. Most other operations are linear. 
@@ -38,7 +36,7 @@ import language.experimental.captureChecking * @define willNotTerminateInf */ @SerialVersionUID(-8428291952499836345L) -class ListBuffer[sealed A] +class ListBuffer[A] extends AbstractBuffer[A] with SeqOps[A, ListBuffer, ListBuffer[A]] with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] @@ -123,7 +121,7 @@ class ListBuffer[sealed A] } // MUST only be called on fresh instances - private def freshFrom(xs: IterableOnce[A]^): this.type = { + private def freshFrom(xs: IterableOnce[A]): this.type = { val it = xs.iterator if (it.hasNext) { var len = 1 @@ -142,7 +140,7 @@ class ListBuffer[sealed A] this } - override final def addAll(xs: IterableOnce[A]^): this.type = { + override final def addAll(xs: IterableOnce[A]): this.type = { val it = xs.iterator if (it.hasNext) { val fresh = new ListBuffer[A].freshFrom(it) @@ -250,7 +248,7 @@ class ListBuffer[sealed A] } } - def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") val it = elems.iterator if (it.hasNext) { @@ -307,7 +305,7 @@ class ListBuffer[sealed A] this } - def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]): this.type = { mutationCount += 1 var src = first var dst: List[A] = null @@ -347,7 +345,7 @@ class ListBuffer[sealed A] this } - def patchInPlace(from: Int, patch: collection.IterableOnce[A]^, replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[A], replaced: Int): this.type = { val _len = len val _from = math.max(from, 0) // normalized val _replaced = math.max(replaced, 0) // normalized @@ -397,9 +395,9 @@ class ListBuffer[sealed A] @SerialVersionUID(3L) object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { - def from[sealed A](coll: collection.IterableOnce[A]^): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) + 
def from[A](coll: collection.IterableOnce[A]): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) - def newBuilder[sealed A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) + def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) - def empty[A]: ListBuffer[A] = new ListBuffer[A @uncheckedCaptures] + def empty[A]: ListBuffer[A] = new ListBuffer[A] } diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala new file mode 100644 index 000000000000..7cc5aa227757 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -0,0 +1,82 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.collection.immutable.List + +/** A simple mutable map backed by a list, so it preserves insertion order. + * + * @tparam K the type of the keys contained in this list map. + * @tparam V the type of the values assigned to keys in this list map. 
+ * + * @define Coll `mutable.ListMap` + * @define coll mutable list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define orderDependent + * @define orderDependentFold + */ +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +class ListMap[K, V] + extends AbstractMap[K, V] + with MapOps[K, V, ListMap, ListMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] + with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]] + with MapFactoryDefaults[K, V, ListMap, Iterable] + with DefaultSerializable { + + override def mapFactory: MapFactory[ListMap] = ListMap + + private[this] var elems: List[(K, V)] = List() + private[this] var siz: Int = 0 + + def get(key: K): Option[V] = elems find (_._1 == key) map (_._2) + def iterator: Iterator[(K, V)] = elems.iterator + + final override def addOne(kv: (K, V)) = { + val (e, key0) = remove(kv._1, elems, List()) + elems = (key0, kv._2) :: e + siz += 1; this + } + + final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this } + + @tailrec + private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = { + if (elems.isEmpty) (acc, key) + else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) } + else remove(key, elems.tail, elems.head :: acc) + } + + final override def clear(): Unit = { elems = List(); siz = 0 } + + final override def size: Int = siz + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override protected[this] def stringPrefix = "ListMap" +} + +/** $factoryInfo + * @define Coll `mutable.ListMap` + * @define coll mutable list map + */ +@SerialVersionUID(3L) +@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") +object ListMap extends MapFactory[ListMap] { + def empty[K, V]: ListMap[K, V] = new ListMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): ListMap[K,V] = 
Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) +} diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala new file mode 100644 index 000000000000..af34ca4ab8c9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -0,0 +1,673 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.language.implicitConversions + +/** This class implements mutable maps with `Long` keys based on a hash table with open addressing. + * + * Basic map operations on single entries, including `contains` and `get`, + * are typically substantially faster with `LongMap` than [[HashMap]]. Methods + * that act on the whole map, including `foreach` and `map` are not in + * general expected to be faster than with a generic map, save for those + * that take particular advantage of the internal structure of the map: + * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`. + * + * Maps with open addressing may become less efficient at lookup after + * repeated addition/removal of elements. Although `LongMap` makes a + * decent attempt to remain efficient regardless, calling `repack` + * on a map that will no longer have elements removed but will be + * used heavily may save both time and storage space. + * + * This map is not intended to contain more than 2^29 entries (approximately + * 500 million). The maximum capacity is 2^30, but performance will degrade + * rapidly as 2^30 is approached. 
+ * + */ +final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) + extends AbstractMap[Long, V] + with MapOps[Long, V, Map, LongMap[V]] + with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] + with Serializable { + import LongMap._ + + def this() = this(LongMap.exceptionDefault, 16, true) + + // TODO: override clear() with an optimization more tailored for efficiency. + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]): LongMap[V] = { + //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? + val b = newSpecificBuilder + b.sizeHint(coll) + b.addAll(coll) + b.result() + } + override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) + + /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */ + def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) + + /** Creates a new `LongMap` with an initial buffer of specified size. + * + * A LongMap can typically contain half as many elements as its buffer size + * before it requires resizing. + */ + def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) + + /** Creates a new `LongMap` with specified default values and initial buffer size. 
*/ + def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + + private[this] var mask = 0 + private[this] var extraKeys: Int = 0 + private[this] var zeroValue: AnyRef = null + private[this] var minValue: AnyRef = null + private[this] var _size = 0 + private[this] var _vacant = 0 + private[this] var _keys: Array[Long] = null + private[this] var _values: Array[AnyRef] = null + + if (initBlank) defaultInitialize(initialBufferSize) + + private[this] def defaultInitialize(n: Int) = { + mask = + if (n<0) 0x7 + else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7 + _keys = new Array[Long](mask+1) + _values = new Array[AnyRef](mask+1) + } + + private[collection] def initializeTo( + m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef] + ): Unit = { + mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz + } + + override def size: Int = _size + (extraKeys+1)/2 + override def knownSize: Int = size + override def isEmpty: Boolean = size == 0 + override def empty: LongMap[V] = new LongMap() + + private def imbalanced: Boolean = + (_size + _vacant) > 0.5*mask || _vacant > _size + + private def toIndex(k: Long): Int = { + // Part of the MurmurHash3 32 bit finalizer + val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt + val x = (h ^ (h >>> 16)) * 0x85EBCA6B + (x ^ (x >>> 13)) & mask + } + + private def seekEmpty(k: Long): Int = { + var e = toIndex(k) + var x = 0 + while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e + } + + private def seekEntry(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask } + e | MissingBit + } + + private def seekEntryOrOpen(k: Long): Int = { + var e = toIndex(k) + var x = 0 + var q = 0L + while ({ q = _keys(e); if (q==k) return e; q+q != 0}) { + x += 1 + e = 
(e + 2*(x+1)*x - 3) & mask + } + if (q == 0) return e | MissingBit + val o = e | MissVacant + while ({ q = _keys(e); if (q==k) return e; q != 0}) { + x += 1 + e = (e + 2*(x+1)*x - 3) & mask + } + o + } + + override def contains(key: Long): Boolean = { + if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0 + else seekEntry(key) >= 0 + } + + override def get(key: Long): Option[V] = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) None + else if (key == 0) Some(zeroValue.asInstanceOf[V]) + else Some(minValue.asInstanceOf[V]) + } + else { + val i = seekEntry(key) + if (i < 0) None else Some(_values(i).asInstanceOf[V]) + } + } + + override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) default + else if (key == 0) zeroValue.asInstanceOf[V1] + else minValue.asInstanceOf[V1] + } + else { + val i = seekEntry(key) + if (i < 0) default else _values(i).asInstanceOf[V1] + } + } + + override def getOrElseUpdate(key: Long, defaultValue: => V): V = { + if (key == -key) { + val kbits = (key>>>63).toInt + 1 + if ((kbits & extraKeys) == 0) { + val value = defaultValue + extraKeys |= kbits + if (key == 0) zeroValue = value.asInstanceOf[AnyRef] + else minValue = value.asInstanceOf[AnyRef] + value + } + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + var i = seekEntryOrOpen(key) + if (i < 0) { + // It is possible that the default value computation was side-effecting + // Our hash table may have resized or even contain what we want now + // (but if it does, we'll replace it) + val value = { + val ok = _keys + val ans = defaultValue + if (ok ne _keys) { + i = seekEntryOrOpen(key) + if (i >= 0) _size -= 1 + } + ans + } + _size += 1 + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + value + } + else _values(i).asInstanceOf[V] + } + } + + 
/** Retrieves the value associated with a key, or the default for that type if none exists + * (null for AnyRef, 0 for floats and integers). + * + * Note: this is the fastest way to retrieve a value that may or + * may not exist, if the default null/zero is acceptable. For key/value + * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast. + */ + def getOrNull(key: Long): V = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V] + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + val i = seekEntry(key) + if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V] + } + } + + /** Retrieves the value associated with a key. + * If the key does not exist in the map, the `defaultEntry` for that key + * will be returned instead. + */ + override def apply(key: Long): V = { + if (key == -key) { + if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key) + else if (key == 0) zeroValue.asInstanceOf[V] + else minValue.asInstanceOf[V] + } + else { + val i = seekEntry(key) + if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V] + } + } + + /** The user-supplied default value for the key. Throws an exception + * if no other default behavior was specified. + */ + override def default(key: Long) = defaultEntry(key) + + private def repack(newMask: Int): Unit = { + val ok = _keys + val ov = _values + mask = newMask + _keys = new Array[Long](mask+1) + _values = new Array[AnyRef](mask+1) + _vacant = 0 + var i = 0 + while (i < ok.length) { + val k = ok(i) + if (k != -k) { + val j = seekEmpty(k) + _keys(j) = k + _values(j) = ov(i) + } + i += 1 + } + } + + /** Repacks the contents of this `LongMap` for maximum efficiency of lookup. + * + * For maps that undergo a complex creation process with both addition and + * removal of keys, and then are used heavily with no further removal of + * elements, calling `repack` after the end of the creation can result in + * improved performance. 
Repacking takes time proportional to the number + * of entries in the map. + */ + def repack(): Unit = { + var m = mask + if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask + while (m > 8 && 8*_size < m) m = m >>> 1 + repack(m) + } + + override def put(key: Long, value: V): Option[V] = { + if (key == -key) { + if (key == 0) { + val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None + zeroValue = value.asInstanceOf[AnyRef] + extraKeys |= 1 + ans + } + else { + val ans = if ((extraKeys&2) == 1) Some(minValue.asInstanceOf[V]) else None + minValue = value.asInstanceOf[AnyRef] + extraKeys |= 2 + ans + } + } + else { + val i = seekEntryOrOpen(key) + if (i < 0) { + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + None + } + else { + val ans = Some(_values(i).asInstanceOf[V]) + _keys(i) = key + _values(i) = value.asInstanceOf[AnyRef] + ans + } + } + } + + /** Updates the map to include a new key-value pair. + * + * This is the fastest way to add an entry to a `LongMap`. + */ + override def update(key: Long, value: V): Unit = { + if (key == -key) { + if (key == 0) { + zeroValue = value.asInstanceOf[AnyRef] + extraKeys |= 1 + } + else { + minValue = value.asInstanceOf[AnyRef] + extraKeys |= 2 + } + } + else { + val i = seekEntryOrOpen(key) + if (i < 0) { + val j = i & IndexMask + _keys(j) = key + _values(j) = value.asInstanceOf[AnyRef] + _size += 1 + if ((i & VacantBit) != 0) _vacant -= 1 + else if (imbalanced) repack() + } + else { + _keys(i) = key + _values(i) = value.asInstanceOf[AnyRef] + } + } + } + + /** Adds a new key/value pair to this map and returns the map. 
*/ + @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3") + def +=(key: Long, value: V): this.type = { update(key, value); this } + + /** Adds a new key/value pair to this map and returns the map. */ + @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this } + + @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this } + + def subtractOne(key: Long): this.type = { + if (key == -key) { + if (key == 0L) { + extraKeys &= 0x2 + zeroValue = null + } + else { + extraKeys &= 0x1 + minValue = null + } + } + else { + val i = seekEntry(key) + if (i >= 0) { + _size -= 1 + _vacant += 1 + _keys(i) = Long.MinValue + _values(i) = null + } + } + this + } + + def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] { + private[this] val kz = _keys + private[this] val vz = _values + + private[this] var nextPair: (Long, V) = + if (extraKeys==0) null + else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V]) + else (Long.MinValue, minValue.asInstanceOf[V]) + + private[this] var anotherPair: (Long, V) = + if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V]) + else null + + private[this] var index = 0 + + def hasNext: Boolean = nextPair != null || (index < kz.length && { + var q = kz(index) + while (q == -q) { + index += 1 + if (index >= kz.length) return false + q = kz(index) + } + nextPair = (kz(index), vz(index).asInstanceOf[V]) + index += 1 + true + }) + def next() = { + if (nextPair == null && !hasNext) throw new NoSuchElementException("next") + val ans = nextPair + if (anotherPair != null) { + nextPair = anotherPair + anotherPair = null + } + else nextPair = null + ans + } + } + + // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code. 
+ override def keysIterator: Iterator[Long] = super.keysIterator + override def valuesIterator: Iterator[V] = super.valuesIterator + + override def foreach[U](f: ((Long,V)) => U): Unit = { + if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V])) + if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V])) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f((k, _values(i).asInstanceOf[V])) + } + i += 1 + } + } + + override def foreachEntry[U](f: (Long,V) => U): Unit = { + if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k, _values(i).asInstanceOf[V]) + } + i += 1 + } + } + + override def clone(): LongMap[V] = { + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = java.util.Arrays.copyOf(_values, _values.length) + val lm = new LongMap[V](defaultEntry, 1, false) + lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz) + lm + } + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + lm += kv + lm + } + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + val m = this + elem1 + elem2 + if(elems.isEmpty) m else m.concat(elems) + } + + override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = { + val lm = clone().asInstanceOf[LongMap[V1]] + xs.iterator.foreach(kv => lm += kv) + lm + } + + override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(xs) + + @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") + 
override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = + clone().asInstanceOf[LongMap[V1]].addOne(key, value) + + /** Applies a function to all keys of this map. */ + def foreachKey[A](f: Long => A): Unit = { + if ((extraKeys & 1) == 1) f(0L) + if ((extraKeys & 2) == 2) f(Long.MinValue) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(k) + } + i += 1 + } + } + + /** Applies a function to all values of this map. */ + def foreachValue[A](f: V => A): Unit = { + if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]) + if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + f(_values(i).asInstanceOf[V]) + } + i += 1 + } + } + + /** Creates a new `LongMap` with different values. + * Unlike `mapValues`, this method generates a new + * collection immediately. + */ + def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null + val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) + val kz = java.util.Arrays.copyOf(_keys, _keys.length) + val vz = new Array[AnyRef](_values.length) + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz) + lm + } + + /** Applies a transformation function to all values stored in this map. + * Note: the default, if any, is not transformed. + */ + @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0") + @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f) + + /** Applies a transformation function to all values stored in this map. 
+ * Note: the default, if any, is not transformed. + */ + def transformValuesInPlace(f: V => V): this.type = { + if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] + if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] + var i,j = 0 + while (i < _keys.length & j < _size) { + val k = _keys(i) + if (k != -k) { + j += 1 + _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef] + } + i += 1 + } + this + } + + def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + + def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + + def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + strictOptimizedCollect(LongMap.newBuilder[V2], pf) + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) + + override protected[this] def className = "LongMap" +} + +object LongMap { + private final val IndexMask = 0x3FFFFFFF + private final val MissingBit = 0x80000000 + private final val VacantBit = 0x40000000 + private final val MissVacant = 0xC0000000 + + private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + + /** A builder for instances of `LongMap`. + * + * This builder can be reused to create multiple instances. + */ + final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { + private[collection] var elems: LongMap[V] = new LongMap[V] + override def addOne(entry: (Long, V)): this.type = { + elems += entry + this + } + def clear(): Unit = elems = new LongMap[V] + def result(): LongMap[V] = elems + override def knownSize: Int = elems.knownSize + } + + /** Creates a new `LongMap` with zero or more key/value pairs. 
*/ + def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + + private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]): LongMap[V] = { + var sz = elems.knownSize + if(sz < 0) sz = 4 + val lm = new LongMap[V](sz * 2) + elems.iterator.foreach{ case (k,v) => lm(k) = v } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new empty `LongMap`. */ + def empty[V]: LongMap[V] = new LongMap[V] + + /** Creates a new empty `LongMap` with the supplied default */ + def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) + + /** Creates a new `LongMap` from an existing source collection. A source collection + * which is already a `LongMap` gets cloned. + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new `LongMap` with the elements of `source` + */ + def from[V](source: IterableOnce[(Long, V)]): LongMap[V] = source match { + case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] + case _ => buildFromIterableOnce(source) + } + + def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + + /** Creates a new `LongMap` from arrays of keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. + */ + def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { + val sz = math.min(keys.length, values.length) + val lm = new LongMap[V](sz * 2) + var i = 0 + while (i < sz) { lm(keys(i)) = values(i); i += 1 } + if (lm.size < (sz>>3)) lm.repack() + lm + } + + /** Creates a new `LongMap` from keys and values. + * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
+ */ + def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + val sz = math.min(keys.size, values.size) + val lm = new LongMap[V](sz * 2) + val ki = keys.iterator + val vi = values.iterator + while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next() + if (lm.size < (sz >> 3)) lm.repack() + lm + } + + implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + + @SerialVersionUID(3L) + private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { + def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] + } + + implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] + private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] + } + + implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala new file mode 100644 index 000000000000..610dc01029cc --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -0,0 +1,268 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +/** Base type of mutable Maps */ +trait Map[K, V] + extends Iterable[(K, V)] + with collection.Map[K, V] + with MapOps[K, V, Map, Map[K, V]] + with Growable[(K, V)] + with Shrinkable[K] + with MapFactoryDefaults[K, V, Map, Iterable] { + + override def mapFactory: scala.collection.MapFactory[Map] = Map + + /* + //TODO consider keeping `remove` because it returns the removed entry + @deprecated("Use subtract or -= instead of remove", "2.13.0") + def remove(key: K): Option[V] = { + val old = get(key) + if(old.isDefined) subtract(key) + old + } + */ + + /** The same map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d) +} + +/** + * @define coll mutable map + * @define Coll `mutable.Map` + */ +trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] + extends IterableOps[(K, V), Iterable, C] + with collection.MapOps[K, V, CC, C] + with Cloneable[C] + with Builder[(K, V), C] + with Growable[(K, V)] + with Shrinkable[K] { + + def result(): C = coll + + @deprecated("Use - or remove on an immutable Map", "2.13.0") + final def - (key: K): C = clone() -= key + + @deprecated("Use -- or removeAll on an immutable Map", "2.13.0") + final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys + + /** Adds a new key/value pair to this map and optionally returns previously bound value. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key the key to update + * @param value the new value + * @return an option value containing the value associated with the key + * before the `put` operation was executed, or `None` if `key` + * was not defined in the map before. + */ + def put(key: K, value: V): Option[V] = { + val r = get(key) + update(key, value) + r + } + + /** Adds a new key/value pair to this map. + * If the map already contains a + * mapping for the key, it will be overridden by the new value. + * + * @param key The key to update + * @param value The new value + */ + def update(key: K, value: V): Unit = { coll += ((key, value)) } + + /** + * Update a mapping for the specified key and its current optionally-mapped value + * (`Some` if there is current mapping, `None` if not). + * + * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`. + * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent). 
+ * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged. + * + * @param key the key value + * @param remappingFunction a partial function that receives current optionally-mapped value and return a new mapping + * @return the new value associated with the specified key + */ + def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { + val previousValue = this.get(key) + val nextValue = remappingFunction(previousValue) + (previousValue, nextValue) match { + case (None, None) => // do nothing + case (Some(_), None) => this.remove(key) + case (_, Some(v)) => this.update(key,v) + } + nextValue + } + + /** If given key is already in this map, returns associated value. + * + * Otherwise, computes value from given expression `op`, stores with key + * in map and returns that value. + * + * Concurrent map implementations may evaluate the expression `op` + * multiple times, or may evaluate `op` without inserting the result. + * + * @param key the key to test + * @param op the computation yielding the value to associate with `key`, if + * `key` is previously unbound. + * @return the value associated with key (either previously or as a result + * of executing the method). + */ + def getOrElseUpdate(key: K, op: => V): V = + get(key) match { + case Some(v) => v + case None => val d = op; this(key) = d; d + } + + /** Removes a key from this map, returning the value associated previously + * with that key as an option. + * @param key the key to be removed + * @return an option value containing the value associated previously with `key`, + * or `None` if `key` was not defined in the map before. 
+ */ + def remove(key: K): Option[V] = { + val r = get(key) + if (r.isDefined) this -= key + r + } + + def clear(): Unit = { keysIterator foreach -= } + + override def clone(): C = empty ++= this + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) + + /** Retains only those mappings for which the predicate + * `p` returns `true`. + * + * @param p The test predicate + */ + def filterInPlace(p: (K, V) => Boolean): this.type = { + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 + } + } + this + } + + @deprecated("Use mapValuesInPlace instead", "2.13.0") + @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f) + + /** Applies a transformation function to all values contained in this map. + * The transformation function produces new values from existing keys + * associated values. + * + * @param f the transformation to apply + * @return the map itself. 
+ */ + def mapValuesInPlace(f: (K, V) => V): this.type = { + if (!isEmpty) this match { + case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) + case _ => + val array = this.toArray[Any] + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + update(k, f(k, v)) + i += 1 + } + } + this + } + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable map + * @define Coll `mutable.Map` + */ +@SerialVersionUID(3L) +object Map extends MapFactory.Delegate[Map](HashMap) { + + @SerialVersionUID(3L) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V) + extends AbstractMap[K, V] + with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { + + override def default(key: K): V = defaultValue(key) + + def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator + override def isEmpty: Boolean = underlying.isEmpty + override def knownSize: Int = underlying.knownSize + override def mapFactory: MapFactory[Map] = underlying.mapFactory + + override def clear(): Unit = underlying.clear() + + def get(key: K): Option[V] = underlying.get(key) + + def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override protected def fromSpecific(coll: 
scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](mapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue)) + } + +} + +/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ +abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala new file mode 100644 index 000000000000..13d7c35e0165 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -0,0 +1,115 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + + +/** A trait for mutable maps with multiple values assigned to a key. + * + * This class is typically used as a mixin. It turns maps which map `K` + * to `Set[V]` objects into multimaps that map `K` to `V` objects. 
+ * + * @example {{{ + * // first import all necessary types from package `collection.mutable` + * import collection.mutable.{ HashMap, MultiMap, Set } + * + * // to create a `MultiMap` the easiest way is to mixin it into a normal + * // `Map` instance + * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String] + * + * // to add key-value pairs to a multimap it is important to use + * // the method `addBinding` because standard methods like `+` will + * // overwrite the complete key-value pair instead of adding the + * // value to the existing key + * mm.addBinding(1, "a") + * mm.addBinding(2, "b") + * mm.addBinding(1, "c") + * + * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))` + * + * // to check if the multimap contains a value there is method + * // `entryExists`, which allows to traverse the including set + * mm.entryExists(1, _ == "a") == true + * mm.entryExists(1, _ == "b") == false + * mm.entryExists(2, _ == "b") == true + * + * // to remove a previous added value there is the method `removeBinding` + * mm.removeBinding(1, "a") + * mm.entryExists(1, _ == "a") == false + * }}} + * + * @define coll multimap + * @define Coll `MultiMap` + */ +@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") +trait MultiMap[K, V] extends Map[K, Set[V]] { + /** Creates a new set. + * + * Classes that use this trait as a mixin can override this method + * to have the desired implementation of sets assigned to new keys. + * By default this is `HashSet`. + * + * @return An empty set of values of type `V`. + */ + protected def makeSet: Set[V] = new HashSet[V] + + /** Assigns the specified `value` to a specified `key`. If the key + * already has a binding to equal to `value`, nothing is changed; + * otherwise a new binding is added for that `key`. + * + * @param key The key to which to bind the new value. + * @param value The value to bind to the key. + * @return A reference to this multimap. 
+ */ + def addBinding(key: K, value: V): this.type = { + get(key) match { + case None => + val set = makeSet + set += value + this(key) = set + case Some(set) => + set += value + } + this + } + + /** Removes the binding of `value` to `key` if it exists, otherwise this + * operation doesn't have any effect. + * + * If this was the last value assigned to the specified key, the + * set assigned to that key will be removed as well. + * + * @param key The key of the binding. + * @param value The value to remove. + * @return A reference to this multimap. + */ + def removeBinding(key: K, value: V): this.type = { + get(key) match { + case None => + case Some(set) => + set -= value + if (set.isEmpty) this -= key + } + this + } + + /** Checks if there exists a binding to `key` such that it satisfies the predicate `p`. + * + * @param key The key for which the predicate is checked. + * @param p The predicate which a value assigned to the key must satisfy. + * @return A boolean if such a binding exists + */ + def entryExists(key: K, p: V => Boolean): Boolean = get(key) match { + case None => false + case Some(set) => set exists p + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala index 3e9b16540031..e98536d0dad5 100644 --- a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala +++ b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala @@ -15,7 +15,6 @@ package collection package mutable import java.util.ConcurrentModificationException -import language.experimental.captureChecking /** * Utilities to check that mutations to a client that tracks @@ -67,7 +66,7 @@ private object MutationTracker { * @param mutationCount a by-name provider of the current mutation count * @tparam A the type of the iterator's elements */ - final class CheckedIterator[A](underlying: Iterator[A]^, mutationCount: => Int) extends AbstractIterator[A] { + final class 
CheckedIterator[A](underlying: Iterator[A], mutationCount: => Int) extends AbstractIterator[A] { private[this] val expectedCount = mutationCount def hasNext: Boolean = { diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala new file mode 100644 index 000000000000..22e99d4650d1 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -0,0 +1,306 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import java.lang.Integer.numberOfLeadingZeros +import java.util.ConcurrentModificationException +import scala.collection.generic.DefaultSerializable + +/** + * @define Coll `OpenHashMap` + * @define coll open hash map + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +@SerialVersionUID(3L) +object OpenHashMap extends MapFactory[OpenHashMap] { + + def empty[K, V] = new OpenHashMap[K, V] + def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it + + def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) + + /** A hash table entry. + * + * The entry is occupied if and only if its `value` is a `Some`; + * deleted if and only if its `value` is `None`. + * If its `key` is not the default value of type `Key`, the entry is occupied. + * If the entry is occupied, `hash` contains the hash value of `key`. 
+ */ + final private class OpenEntry[Key, Value](var key: Key, + var hash: Int, + var value: Option[Value]) + + private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1) +} + +/** A mutable hash map based on an open addressing method. The precise scheme is + * undefined, but it should make a reasonable effort to ensure that an insert + * with consecutive hash codes is not unnecessarily penalised. In particular, + * mappings of consecutive integer keys should work without significant + * performance loss. + * + * @tparam Key type of the keys in this map. + * @tparam Value type of the values in this map. + * @param initialSize the initial size of the internal hash table. + * + * @define Coll `OpenHashMap` + * @define coll open hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") +class OpenHashMap[Key, Value](initialSize : Int) + extends AbstractMap[Key, Value] + with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] + with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] + with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable] + with DefaultSerializable { + + import OpenHashMap.OpenEntry + private type Entry = OpenEntry[Key, Value] + + /** A default constructor creates a hashmap with initial size `8`. + */ + def this() = this(8) + + override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap + + private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) + + private[this] var mask = actualInitialSize - 1 + + /** The hash table. + * + * The table's entries are initialized to `null`, indication of an empty slot. + * A slot is either deleted or occupied if and only if the entry is non-`null`. 
+ */ + private[this] var table = new Array[Entry](actualInitialSize) + + private[this] var _size = 0 + private[this] var deleted = 0 + + // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. + private[this] var modCount = 0 + + override def size = _size + override def knownSize: Int = size + private[this] def size_=(s : Int): Unit = _size = s + override def isEmpty: Boolean = _size == 0 + /** Returns a mangled hash code of the provided key. */ + protected def hashOf(key: Key) = { + var h = key.## + h ^= ((h >>> 20) ^ (h >>> 12)) + h ^ (h >>> 7) ^ (h >>> 4) + } + + /** Increase the size of the table. + * Copy only the occupied slots, effectively eliminating the deleted slots. + */ + private[this] def growTable() = { + val oldSize = mask + 1 + val newSize = 4 * oldSize + val oldTable = table + table = new Array[Entry](newSize) + mask = newSize - 1 + oldTable.foreach( entry => + if (entry != null && entry.value != None) + table(findIndex(entry.key, entry.hash)) = entry ) + deleted = 0 + } + + /** Return the index of the first slot in the hash table (in probe order) + * that is, in order of preference, either occupied by the given key, deleted, or empty. + * + * @param hash hash value for `key` + */ + private[this] def findIndex(key: Key, hash: Int): Int = { + var index = hash & mask + var j = 0 + + // Index of the first slot containing a deleted entry, or -1 if none found yet + var firstDeletedIndex = -1 + + var entry = table(index) + while (entry != null) { + if (entry.hash == hash && entry.key == key && entry.value != None) + return index + + if (firstDeletedIndex == -1 && entry.value == None) + firstDeletedIndex = index + + j += 1 + index = (index + j) & mask + entry = table(index) + } + + if (firstDeletedIndex == -1) index else firstDeletedIndex + } + + // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing. 
+ override def update(key: Key, value: Value): Unit = put(key, value) + + @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0") + def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this } + + @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0") + def subtractOne (key: Key): this.type = { remove(key); this } + + override def put(key: Key, value: Value): Option[Value] = + put(key, hashOf(key), value) + + private def put(key: Key, hash: Int, value: Value): Option[Value] = { + if (2 * (size + deleted) > mask) growTable() + val index = findIndex(key, hash) + val entry = table(index) + if (entry == null) { + table(index) = new OpenEntry(key, hash, Some(value)) + modCount += 1 + size += 1 + None + } else { + val res = entry.value + if (entry.value == None) { + entry.key = key + entry.hash = hash + size += 1 + deleted -= 1 + modCount += 1 + } + entry.value = Some(value) + res + } + } + + /** Delete the hash table slot contained in the given entry. */ + @`inline` + private[this] def deleteSlot(entry: Entry) = { + entry.key = null.asInstanceOf[Key] + entry.hash = 0 + entry.value = None + + size -= 1 + deleted += 1 + } + + override def remove(key : Key): Option[Value] = { + val entry = table(findIndex(key, hashOf(key))) + if (entry != null && entry.value != None) { + val res = entry.value + deleteSlot(entry) + res + } else None + } + + def get(key : Key) : Option[Value] = { + val hash = hashOf(key) + var index = hash & mask + var entry = table(index) + var j = 0 + while(entry != null){ + if (entry.hash == hash && + entry.key == key){ + return entry.value + } + + j += 1 + index = (index + j) & mask + entry = table(index) + } + None + } + + /** An iterator over the elements of this map. Use of this iterator follows + * the same contract for concurrent modification as the foreach method. 
+ * + * @return the iterator + */ + def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] { + override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get) + } + + override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] { + override protected def nextResult(node: Entry): Key = node.key + } + override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] { + override protected def nextResult(node: Entry): Value = node.value.get + } + + private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] { + private[this] var index = 0 + private[this] val initialModCount = modCount + + private[this] def advance(): Unit = { + if (initialModCount != modCount) throw new ConcurrentModificationException + while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1 + } + + def hasNext = {advance(); index <= mask } + + def next() = { + advance() + val result = table(index) + index += 1 + nextResult(result) + } + protected def nextResult(node: Entry): A + } + + override def clone() = { + val it = new OpenHashMap[Key, Value] + foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get)) + it + } + + /** Loop over the key, value mappings of this map. + * + * The behaviour of modifying the map during an iteration is as follows: + * - Deleting a mapping is always permitted. + * - Changing the value of mapping which is already present is permitted. + * - Anything else is not permitted. It will usually, but not always, throw an exception. + * + * @tparam U The return type of the specified function `f`, return result of which is ignored. + * @param f The function to apply to each key, value mapping. 
+ */ + override def foreach[U](f : ((Key, Value)) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f((entry.key, entry.value.get))} + ) + } + override def foreachEntry[U](f : (Key, Value) => U): Unit = { + val startModCount = modCount + foreachUndeletedEntry(entry => { + if (modCount != startModCount) throw new ConcurrentModificationException + f(entry.key, entry.value.get)} + ) + } + + private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = { + table.foreach(entry => if (entry != null && entry.value != None) f(entry)) + } + + override def mapValuesInPlace(f : (Key, Value) => Value): this.type = { + foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get))) + this + } + + override def filterInPlace(f : (Key, Value) => Boolean): this.type = { + foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) + this + } + + override protected[this] def stringPrefix = "OpenHashMap" +} diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala new file mode 100644 index 000000000000..5572bdca3cf6 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -0,0 +1,402 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.math.Ordering + +/** A heap-based priority queue. + * + * To prioritize elements of type `A` there must be an implicit + * `Ordering[A]` available at creation. 
Elements are retrieved + * in priority order by using [[dequeue]] or [[dequeueAll]]. + * + * If multiple elements have the same priority as determined by the ordering for this + * `PriorityQueue`, no guarantees are made regarding the order in which those elements + * are returned by `dequeue` or `dequeueAll`. In particular, that means this + * class does not guarantee first-in-first-out behavior, as may be + * incorrectly inferred from the fact that this data structure is + * called a "queue". + * + * Only the `dequeue` and `dequeueAll` methods will return elements in priority + * order (while removing elements from the heap). Standard collection methods + * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary + * iteration order: they will traverse the heap or remove elements + * in whichever order seems most convenient. + * + * Therefore, printing a `PriorityQueue` will not show elements in priority order, + * though the highest-priority element will be printed first. + * To print the elements in order, it's necessary to `dequeue` them. + * To do this non-destructively, duplicate the `PriorityQueue` first; + * the `clone` method is a suitable way to obtain a disposable copy. + * + * Client keys are assumed to be immutable. Mutating keys may violate + * the invariant of the underlying heap-ordered tree. Note that [[clone]] + * does not rebuild the underlying tree. + * + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) + * }}} + * + * @tparam A type of the elements in this priority queue. + * @param ord implicit ordering used to compare the elements of type `A`. 
+ * + * @define Coll PriorityQueue + * @define coll priority queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class PriorityQueue[A](implicit val ord: Ordering[A]) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable +{ + + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) + val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + def p_size0 = size0 + def p_size0_=(s: Int) = size0 = s + def p_array = array + def p_ensureSize(n: Int) = super.ensureSize(n) + def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } + } + + private val resarr = new ResizableArrayAccess[A] + + resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for? 
+ def length: Int = resarr.length - 1 // adjust length accordingly + override def size: Int = length + override def knownSize: Int = length + override def isEmpty: Boolean = resarr.p_size0 < 2 + + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } + + def result() = this + + private def toA(x: AnyRef): A = x.asInstanceOf[A] + protected def fixUp(as: Array[AnyRef], m: Int): Unit = { + var k: Int = m + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { + resarr.p_swap(k, k / 2) + k = k / 2 + } + } + + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) + var k: Int = m + while (n >= 2 * k) { + var j = 2 * k + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) + j += 1 + if (ord.gteq(toA(as(k)), toA(as(j)))) + return k != m + else { + val h = as(k) + as(k) = as(j) + as(j) = h + k = j + } + } + k != m + } + + /** Inserts a single element into the priority queue. + * + * @param elem the element to insert. + * @return this $coll. 
+ */ + def addOne(elem: A): this.type = { + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + fixUp(resarr.p_array, resarr.p_size0) + resarr.p_size0 += 1 + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + val from = resarr.p_size0 + for (x <- xs.iterator) unsafeAdd(x) + heapify(from) + this + } + + private def unsafeAdd(elem: A): Unit = { + // like += but skips fixUp, which breaks the ordering invariant + // a series of unsafeAdds MUST be followed by heapify + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + resarr.p_size0 += 1 + } + + private def heapify(from: Int): Unit = { + // elements at indices 1..from-1 were already in heap order before any adds + // elements at indices from..n are newly added, their order must be fixed + val n = length + + if (from <= 2) { + // no pre-existing order to maintain, do the textbook heapify algorithm + for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) + } + else if (n - from < 4) { + // for very small adds, doing the simplest fix is faster + for (i <- from to n) fixUp(resarr.p_array, i) + } + else { + var min = from/2 // tracks the minimum element in the queue + val queue = scala.collection.mutable.Queue[Int](min) + + // do fixDown on the parents of all the new elements + // except the parent of the first new element, which is in the queue + // (that parent is treated specially because it might be the root) + for (i <- n/2 until min by -1) { + if (fixDown(resarr.p_array, i, n)) { + // there was a swap, so also need to fixDown i's parent + val parent = i/2 + if (parent < min) { // make sure same parent isn't added twice + min = parent + queue += parent + } + } + } + + while (queue.nonEmpty) { + val i = queue.dequeue() + if (fixDown(resarr.p_array, i, n)) { + val parent = i/2 + if (parent < min && parent > 0) { + // the "parent > 0" is to avoid adding the parent of the root + min = parent + queue += parent + } + 
} + } + } + } + + /** Adds all elements provided by a `IterableOnce` object + * into the priority queue. + * + * @param xs a iterable object. + * @return a new priority queue containing elements of both `xs` and `this`. + */ + def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } + + /** Adds all elements to the queue. + * + * @param elems the elements to add. + */ + def enqueue(elems: A*): Unit = { this ++= elems } + + /** Returns the element with the highest priority in the queue, + * and removes this element from the queue. + * + * @throws NoSuchElementException + * @return the element with the highest priority. + */ + def dequeue(): A = + if (resarr.p_size0 > 1) { + resarr.p_size0 = resarr.p_size0 - 1 + val result = resarr.p_array(1) + resarr.p_array(1) = resarr.p_array(resarr.p_size0) + resarr.p_array(resarr.p_size0) = null // erase reference from array + fixDown(resarr.p_array, 1, resarr.p_size0 - 1) + toA(result) + } else + throw new NoSuchElementException("no element to remove from heap") + + def dequeueAll[A1 >: A]: immutable.Seq[A1] = { + val b = ArrayBuilder.make[Any] + b.sizeHint(size) + while (nonEmpty) { + b += dequeue() + } + immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] + } + + /** Returns the element with the highest priority in the queue, + * or throws an error if there is no element contained in the queue. + * + * @return the element with the highest priority. + */ + override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") + + /** Removes all elements from the queue. After this operation is completed, + * the queue will be empty. + */ + def clear(): Unit = { + resarr.clear() + resarr.p_size0 = 1 + } + + /** Returns an iterator which yields all the elements. + * + * Note: The order of elements returned is undefined. + * If you want to traverse the elements in priority queue + * order, use `clone().dequeueAll.iterator`. 
+ * + * @return an iterator over all the elements. + */ + override def iterator: Iterator[A] = resarr.iterator.drop(1) + + /** Returns the reverse of this priority queue. The new priority queue has + * the same elements as the original, but the opposite ordering. + * + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. + * + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. + */ + def reverse: PriorityQueue[A] = { + val revq = new PriorityQueue[A]()(ord.reverse) + // copy the existing data into the new array backwards + // this won't put it exactly into the correct order, + // but will require less fixing than copying it in + // the original order + val n = resarr.p_size0 + revq.resarr.p_ensureSize(n) + revq.resarr.p_size0 = n + val from = resarr.p_array + val to = revq.resarr.p_array + for (i <- 1 until n) to(i) = from(n-i) + revq.heapify(1) + revq + } + + + /** Returns an iterator which yields all the elements in the reverse order + * than that returned by the method `iterator`. + * + * Note: The order of elements returned is undefined. + * + * @return an iterator over all elements sorted in descending order. + */ + def reverseIterator: Iterator[A] = new AbstractIterator[A] { + private[this] var i = resarr.p_size0 - 1 + def hasNext: Boolean = i >= 1 + def next(): A = { + val n = resarr.p_array(i) + i -= 1 + toA(n) + } + } + + /** Returns a regular queue containing the same elements. + * + * Note: the order of elements is undefined. + */ + def toQueue: Queue[A] = new Queue[A] ++= this.iterator + + /** Returns a textual representation of a queue as a string. + * + * @return the string representation of this queue. + */ + override def toString() = toList.mkString("PriorityQueue(", ", ", ")") + + /** Converts this $coll to a list. + * + * Note: the order of elements is undefined. 
+ * + * @return a list containing all elements of this $coll. + */ + override def toList: immutable.List[A] = immutable.List.from(this.iterator) + + /** This method clones the priority queue. + * + * @return a priority queue with the same elements. + */ + override def clone(): PriorityQueue[A] = { + val pq = new PriorityQueue[A] + val n = resarr.p_size0 + pq.resarr.p_ensureSize(n) + java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) + pq.resarr.p_size0 = n + pq + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if (copied > 0) { + Array.copy(resarr.p_array, 1, xs, start, copied) + } + copied + } + + @deprecated("Use `PriorityQueue` instead", "2.13.0") + def orderedCompanion: PriorityQueue.type = PriorityQueue + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) + + override protected[this] def className = "PriorityQueue" +} + + +@SerialVersionUID(3L) +object PriorityQueue extends SortedIterableFactory[PriorityQueue] { + def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() + } + } + + def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + + def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + val b = newBuilder[E] + b ++= it + b.result() + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala new file mode 100644 index 000000000000..18cce0bd3852 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala @@ -0,0 +1,138 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.nowarn +import scala.collection.generic.DefaultSerializable + + +/** `Queue` objects implement data structures that allow to + * insert and retrieve elements in a first-in-first-out (FIFO) manner. + * + * @define Coll `mutable.Queue` + * @define coll mutable queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Queue, Queue[A]] + with StrictOptimizedSeqOps[A, Queue, Queue[A]] + with IterableFactoryDefaults[A, Queue] + with ArrayDequeOps[A, Queue, Queue[A]] + with Cloneable[Queue[A]] + with DefaultSerializable { + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def iterableFactory: SeqFactory[Queue] = Queue + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Queue" + + /** + * Add elements to the end of this queue + * + * @param elem + * @return this + */ + def enqueue(elem: A): this.type = this += elem + + /** Enqueue two or more elements at the end of the queue. The last element + * of the sequence will be on end of the queue. + * + * @param elems the element sequence. + * @return this + */ + def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems) + + /** Enqueues all elements in the given iterable object into the queue. The + * last element in the iterable object will be on front of the new queue. + * + * @param elems the iterable object. 
+ * @return this + */ + def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems + + /** + * Removes the first element from this queue and returns it + * + * @return + * @throws NoSuchElementException when queue is empty + */ + def dequeue(): A = removeHead() + + /** Returns the first element in the queue which satisfies the + * given predicate, and removes this element from the queue. + * + * @param p the predicate used for choosing the first element + * @return the first element of the queue for which p yields true + */ + def dequeueFirst(p: A => Boolean): Option[A] = + removeFirst(p) + + /** Returns all elements in the queue which satisfy the + * given predicate, and removes those elements from the queue. + * + * @param p the predicate used for choosing elements + * @return a sequence of all elements in the queue for which + * p yields true. + */ + def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] = + removeAll(p) + + /** + * Returns and dequeues all elements from the queue which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) + + /** Returns the first element in the queue, or throws an error if there + * is no element contained in the queue. + * + * @return the first element. 
+ */ + @`inline` final def front: A = head + + override protected def klone(): Queue[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() + } + + override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] = + new Queue(array, start = 0, end) + +} + +/** + * $factoryInfo + * @define coll queue + * @define Coll `Queue` + */ +@SerialVersionUID(3L) +object Queue extends StrictOptimizedSeqFactory[Queue] { + + def from[A](source: IterableOnce[A]): Queue[A] = empty ++= source + + def empty[A]: Queue[A] = new Queue + + def newBuilder[A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala new file mode 100644 index 000000000000..3ac0e1a1f797 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala @@ -0,0 +1,652 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.annotation.tailrec +import collection.{AbstractIterator, Iterator} +import java.lang.String + +/** + * An object containing the red-black tree implementation used by mutable `TreeMaps`. + * + * The trees implemented in this object are *not* thread safe. + */ +private[collection] object RedBlackTree { + + // ---- class structure ---- + + // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node. + // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size. 
+ // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) + // on the size of the range. + + final class Tree[A, B](var root: Node[A, B], var size: Int) { + def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) + } + + final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" + } + + object Tree { + def empty[A, B]: Tree[A, B] = new Tree(null, 0) + } + + object Node { + + @`inline` def apply[A, B](key: A, value: B, red: Boolean, + left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, left, right, parent) + + @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + new Node(key, value, red, null, null, parent) + + def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) + } + + // ---- getters ---- + + def isRed(node: Node[_, _]) = (node ne null) && node.red + def isBlack(node: Node[_, _]) = (node eq null) || !node.red + + // ---- size ---- + + def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right) + def size(tree: Tree[_, _]): Int = tree.size + def isEmpty(tree: Tree[_, _]) = tree.root eq null + def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 } + + // ---- search ---- + + def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match { + case null => None + case node => Some(node.value) + } + + @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = + if (node eq null) null + else { + val cmp = ord.compare(key, node.key) + if (cmp < 0) getNode(node.left, key) + else if (cmp > 0) getNode(node.right, key) + else node + } + + def contains[A: Ordering](tree: 
Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null + + def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def minNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else minNodeNonNull(node) + + @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.left eq null) node else minNodeNonNull(node.left) + + def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match { + case null => None + case node => Some(node.key) + } + + private def maxNode[A, B](node: Node[A, B]): Node[A, B] = + if (node eq null) null else maxNodeNonNull(node) + + @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] = + if (node.right eq null) node else maxNodeNonNull(node.right) + + /** + * Returns the first (lowest) map entry with a key equal or greater than `key`. Returns `None` if there is no such + * node. 
+ */ + def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + minNodeAfter(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp <= 0) y else successor(y) + } + } + + /** + * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node. + */ + def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some((node.key, node.value)) + } + + def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] = + maxNodeBefore(tree.root, key) match { + case null => None + case node => Some(node.key) + } + + private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = { + if (node eq null) null + else { + var y: Node[A, B] = null + var x = node + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + if (cmp > 0) y else predecessor(y) + } + } + + // ---- insertion ---- + + def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + var y: Node[A, B] = null + var x = tree.root + var cmp = 1 + while ((x ne null) && cmp != 0) { + y = x + cmp = ord.compare(key, x.key) + x = if (cmp < 0) x.left else x.right + } + + if (cmp == 0) y.value = value + else { + val z = Node.leaf(key, 
value, red = true, y) + + if (y eq null) tree.root = z + else if (cmp < 0) y.left = z + else y.right = z + + fixAfterInsert(tree, z) + tree.size += 1 + } + } + + private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = { + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + rotateLeft(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateRight(tree, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + rotateRight(tree, z) + } + z.parent.red = false + z.parent.parent.red = true + rotateLeft(tree, z.parent.parent) + } + } + } + tree.root.red = false + } + + // ---- deletion ---- + + def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = { + val z = getNode(tree.root, key) + if (z ne null) { + var y = z + var yIsRed = y.red + var x: Node[A, B] = null + var xParent: Node[A, B] = null + + if (z.left eq null) { + x = z.right + transplant(tree, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + transplant(tree, z, z.left) + xParent = z.parent + } + else { + y = minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + transplant(tree, y, y.right) + y.right = z.right + y.right.parent = y + } + transplant(tree, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) fixAfterDelete(tree, x, xParent) + tree.size -= 1 + } + } + + private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = { + var x = node + var xParent = parent + while 
((x ne tree.root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateLeft(tree, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + rotateRight(tree, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + rotateLeft(tree, xParent) + x = tree.root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + rotateRight(tree, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + rotateLeft(tree, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + rotateRight(tree, xParent) + x = tree.root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + } + + // ---- helpers ---- + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + /** + * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is, + * therefore, the first node), this method returns `null`. 
+ */ + private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = { + if (node.left ne null) maxNodeNonNull(node.left) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.left)) { + x = y + y = y.parent + } + y + } + } + + private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.right ne null) + val y = x.right + x.right = y.left + + if (y.left ne null) y.left.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.left) x.parent.left = y + else x.parent.right = y + + y.left = x + x.parent = y + } + + private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) { + // assert(x.left ne null) + val y = x.left + x.left = y.right + + if (y.right ne null) y.right.parent = x + y.parent = x.parent + + if (x.parent eq null) tree.root = y + else if (x eq x.parent.right) x.parent.right = y + else x.parent.left = y + + y.right = x + x.parent = y + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. 
+ */ + private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = { + if (to.parent eq null) tree.root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + + if (from ne null) from.parent = to.parent + } + + // ---- tree traversal ---- + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f) + + private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = + if (node ne null) foreachNodeNonNull(node, f) + + private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = { + if (node.left ne null) foreachNodeNonNull(node.left, f) + f((node.key, node.value)) + if (node.right ne null) foreachNodeNonNull(node.right, f) + } + + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = { + def g(node: Node[A, _]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = { + def g(node: Node[A, B]): Unit = { + val l = node.left + if(l ne null) g(l) + f(node.key, node.value) + val r = node.right + if(r ne null) g(r) + } + val r = tree.root + if(r ne null) g(r) + } + + def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f) + + private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = + if (node ne null) transformNodeNonNull(node, f) + + private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = { + if (node.left ne null) transformNodeNonNull(node.left, f) + node.value = f(node.key, node.value) + if (node.right ne null) transformNodeNonNull(node.right, f) + } + + def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + new EntriesIterator(tree, start, end) + + def keysIterator[A: Ordering](tree: 
Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + new KeysIterator(tree, start, end) + + def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + new ValuesIterator(tree, start, end) + + private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + (implicit ord: Ordering[A]) extends AbstractIterator[R] { + + protected def nextResult(node: Node[A, B]): R + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): R = nextNode match { + case null => throw new NoSuchElementException("next on empty iterator") + case node => + nextNode = successor(node) + setNullIfAfterEnd() + nextResult(node) + } + + private[this] var nextNode: Node[A, B] = start match { + case None => minNode(tree.root) + case Some(from) => minNodeAfter(tree.root, from) + } + + private[this] def setNullIfAfterEnd(): Unit = + if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0) + nextNode = null + + setNullIfAfterEnd() + } + + private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, (A, B)](tree, start, end) { + + def nextResult(node: Node[A, B]) = (node.key, node.value) + } + + private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, A](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.key + } + + private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + extends TreeIterator[A, B, B](tree, start, end) { + + def nextResult(node: Node[A, B]) = node.value + } + + // ---- debugging ---- + + /** + * Checks if the tree is in a valid state. 
That happens if: + * - It is a valid binary search tree; + * - All red-black properties are satisfied; + * - All non-null nodes have their `parent` reference correct; + * - The size variable in `tree` corresponds to the actual size of the tree. + */ + def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean = + isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size + + /** + * Returns true if all non-null nodes have their `parent` reference correct. + */ + private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = { + + def hasProperParentRefs(node: Node[A, B]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (node.left.parent ne node) || + (node.right ne null) && (node.right.parent ne node)) false + else hasProperParentRefs(node.left) && hasProperParentRefs(node.right) + } + } + + if(tree.root eq null) true + else (tree.root.parent eq null) && hasProperParentRefs(tree.root) + } + + /** + * Returns true if this node follows the properties of a binary search tree. + */ + private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = { + if (node eq null) true + else { + if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) || + (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false + else isValidBST(node.left) && isValidBST(node.right) + } + } + + /** + * Returns true if the tree has all the red-black tree properties: if the root node is black, if all children of red + * nodes are black and if the path from any node to any of its null children has the same number of black nodes. 
+ */ + private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = { + + def noRedAfterRed(node: Node[A, B]): Boolean = { + if (node eq null) true + else if (node.red && (isRed(node.left) || isRed(node.right))) false + else noRedAfterRed(node.left) && noRedAfterRed(node.right) + } + + def blackHeight(node: Node[A, B]): Int = { + if (node eq null) 1 + else { + val lh = blackHeight(node.left) + val rh = blackHeight(node.right) + + if (lh == -1 || lh != rh) -1 + else if (isRed(node)) lh + else lh + 1 + } + } + + isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0 + } + + // building + + /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ + def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, Null] = size match { + case 0 => null + case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val x = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(x, null, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ + def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): Node[A, B] = size match { + case 0 => null + case 1 => + val (k, v) = xs.next() + new Node(k, v, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val (k, v) = xs.next() + val right = f(level+1, size-1-leftSize) + val n = new Node(k, v, false, left, right, null) + if(left ne null) 
left.parent = n + right.parent = n + n + } + new Tree(f(1, size), size) + } + + def copyTree[A, B](n: Node[A, B]): Node[A, B] = + if(n eq null) null else { + val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) + if(c.left != null) c.left.parent = c + if(c.right != null) c.right.parent = c + c + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala new file mode 100644 index 000000000000..d7d3b6db4f09 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -0,0 +1,55 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + + +/** `ReusableBuilder` is a marker trait that indicates that a `Builder` + * can be reused to build more than one instance of a collection. In + * particular, calling `result()` followed by `clear()` will produce a + * collection and reset the builder to begin building a new collection + * of the same type. + * + * In general no method other than `clear()` may be called after `result()`. + * It is up to subclasses to implement and to document other allowed sequences + * of operations (e.g. calling other methods after `result()` in order to obtain + * different snapshots of a collection under construction). + * + * @tparam Elem the type of elements that get added to the builder. + * @tparam To the type of collection that it produced. + * + * @define multipleResults + * + * This Builder can be reused after calling `result()` without an + * intermediate call to `clear()` in order to build multiple related results. 
+ */ +trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] { + /** Clears the contents of this builder. + * After execution of this method, the builder will contain no elements. + * + * If executed immediately after a call to `result()`, this allows a new + * instance of the same type of collection to be built. + */ + override def clear(): Unit // Note: overriding for Scaladoc only! + + /** Produces a collection from the added elements. + * + * After a call to `result`, the behavior of all other methods is undefined + * save for `clear()`. If `clear()` is called, then the builder is reset and + * may be used to build another instance. + * + * @return a collection containing the elements added to this builder. + */ + override def result(): To // Note: overriding for Scaladoc only! +} diff --git a/tests/pos-special/stdlib/collection/mutable/Seq.scala b/tests/pos-special/stdlib/collection/mutable/Seq.scala index 443eec379c1b..e83d79987208 100644 --- a/tests/pos-special/stdlib/collection/mutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/mutable/Seq.scala @@ -13,7 +13,6 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, SeqFactory} -import language.experimental.captureChecking trait Seq[A] extends Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala new file mode 100644 index 000000000000..67066f99e07e --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -0,0 +1,38 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +/** + * A generic trait for ordered mutable maps. 
Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Note that when checking for equality [[SeqMap]] does not take into account + * ordering. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll mutable Seq map + * @define Coll `mutable.SeqMap` + */ + +trait SeqMap[K, V] extends Map[K, V] + with collection.SeqMap[K, V] + with MapOps[K, V, SeqMap, SeqMap[K, V]] + with MapFactoryDefaults[K, V, SeqMap, Iterable] { + override def mapFactory: MapFactory[SeqMap] = SeqMap +} + +object SeqMap extends MapFactory.Delegate[SeqMap](LinkedHashMap) diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala new file mode 100644 index 000000000000..6530e8fedf05 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -0,0 +1,122 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.mutable + +import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} + +/** Base trait for mutable sets */ +trait Set[A] + extends Iterable[A] + with collection.Set[A] + with SetOps[A, Set, Set[A]] + with IterableFactoryDefaults[A, Set] { + + override def iterableFactory: IterableFactory[Set] = Set +} + +/** + * @define coll mutable set + * @define Coll `mutable.Set` + */ +trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] + extends collection.SetOps[A, CC, C] + with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below + with Cloneable[C] + with Builder[A, C] + with Growable[A] + with Shrinkable[A] { + + def result(): C = coll + + /** Check whether the set contains the given element, and add it if not. + * + * @param elem the element to be added + * @return true if the element was added + */ + def add(elem: A): Boolean = + !contains(elem) && { + coll += elem; true + } + + /** Updates the presence of a single element in this set. + * + * This method allows one to add or remove an element `elem` + * from this set depending on the value of parameter `included`. + * Typically, one would use the following syntax: + * {{{ + * set(elem) = true // adds element + * set(elem) = false // removes element + * }}} + * + * @param elem the element to be added or removed + * @param included a flag indicating whether element should be included or excluded. + */ + def update(elem: A, included: Boolean): Unit = { + if (included) add(elem) + else remove(elem) + } + + /** Removes an element from this set. 
+ * + * @param elem the element to be removed + * @return true if this set contained the element before it was removed + */ + def remove(elem: A): Boolean = { + val res = contains(elem) + coll -= elem + res + } + + def diff(that: collection.Set[A]): C = + foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) + + @deprecated("Use filterInPlace instead", "2.13.0") + @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) + + /** Removes all elements from the set for which do not satisfy a predicate. + * @param p the predicate used to test elements. Only elements for + * which `p` returns `true` are retained in the set; all others + * are removed. + */ + def filterInPlace(p: A => Boolean): this.type = { + if (nonEmpty) { + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val elem = array(i).asInstanceOf[A] + if (!p(elem)) { + this -= elem + } + i += 1 + } + } + this + } + + override def clone(): C = empty ++= this + + override def knownSize: Int = super[IterableOps].knownSize +} + +/** + * $factoryInfo + * @define coll mutable set + * @define Coll `mutable.Set` + */ +@SerialVersionUID(3L) +object Set extends IterableFactory.Delegate[Set](HashSet) + + +/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. 
*/ +abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala index de2a24ecf01f..006a3b88e49f 100644 --- a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala @@ -14,7 +14,6 @@ package scala package collection.mutable import scala.annotation.tailrec -import language.experimental.captureChecking /** This trait forms part of collections that can be reduced * using a `-=` operator. @@ -53,7 +52,7 @@ trait Shrinkable[-A] { * @param xs the iterator producing the elements to remove. * @return the $coll itself */ - def subtractAll(xs: collection.IterableOnce[A]^): this.type = { + def subtractAll(xs: collection.IterableOnce[A]): this.type = { @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { if (xs.nonEmpty) { subtractOne(xs.head) @@ -75,6 +74,6 @@ trait Shrinkable[-A] { } /** Alias for `subtractAll` */ - @`inline` final def --= (xs: collection.IterableOnce[A]^): this.type = subtractAll(xs) + @`inline` final def --= (xs: collection.IterableOnce[A]): this.type = subtractAll(xs) } diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala new file mode 100644 index 000000000000..eb2f0d231b7a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -0,0 +1,103 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection.mutable + +import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} + +/** + * Base type for mutable sorted map collections + */ +trait SortedMap[K, V] + extends collection.SortedMap[K, V] + with Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] { + + override def unsorted: Map[K, V] = this + + override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + /** The same sorted map with a given default function. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefault`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. + * + * @param d the function mapping keys to values, used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefault(d: K => V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + + /** The same map with a given default value. + * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. + * are not affected by `withDefaultValue`. + * + * Invoking transformer methods (e.g. `map`) will not preserve the default value. 
+ * + * @param d default value used for non-present keys + * @return a wrapper of the map with a default value + */ + override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d) +} + +trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends collection.SortedMapOps[K, V, CC, C] + with MapOps[K, V, Map, C] { + + def unsorted: Map[K, V] + + @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0") + override def updated[V1 >: V](key: K, value: V1): CC[K, V1] = + clone().asInstanceOf[CC[K, V1]].addOne((key, value)) +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { + + @SerialVersionUID(3L) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K => V) + extends Map.WithDefault[K, V](underlying, defaultValue) + with SortedMap[K, V] + with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] + with Serializable { + + override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory + + def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start) + + def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start) + + implicit def ordering: Ordering[K] = underlying.ordering + + def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] = + new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue) + + // Need to override following methods to match type signatures of `SortedMap.WithDefault` + // for operations preserving default value + override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this } + + override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } + + override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) + + override def concat[V2 >: V](suffix: 
collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + underlying.concat(suffix).withDefault(defaultValue) + + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) + + override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = + SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue)) + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala new file mode 100644 index 000000000000..2bcb8dc7845a --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -0,0 +1,48 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection +package mutable + +/** + * Base type for mutable sorted set collections + */ +trait SortedSet[A] + extends Set[A] + with collection.SortedSet[A] + with SortedSetOps[A, SortedSet, SortedSet[A]] + with SortedSetFactoryDefaults[A, SortedSet, Set] { + + override def unsorted: Set[A] = this + + override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet +} + +/** + * @define coll mutable sorted set + * @define Coll `mutable.Sortedset` + */ +trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] + extends SetOps[A, Set, C] + with collection.SortedSetOps[A, CC, C] { + + def unsorted: Set[A] +} + +/** + * $factoryInfo + * @define coll mutable sorted set + * @define Coll `mutable.Sortedset` + */ +@SerialVersionUID(3L) +object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala new file mode 100644 index 000000000000..675666bc805c --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala @@ -0,0 +1,142 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.mutable + +import scala.annotation.{migration, nowarn} +import scala.collection.generic.DefaultSerializable +import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} + +/** A stack implements a data structure which allows to store and retrieve + * objects in a last-in-first-out (LIFO) fashion. 
+ * + * Note that operations which consume and produce iterables preserve order, + * rather than reversing it (as would be expected from building a new stack + * by pushing an element at a time). + * + * @tparam A type of the elements contained in this stack. + * + * @define Coll `Stack` + * @define coll stack + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") +class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) + extends ArrayDeque[A](array, start, end) + with IndexedSeqOps[A, Stack, Stack[A]] + with StrictOptimizedSeqOps[A, Stack, Stack[A]] + with IterableFactoryDefaults[A, Stack] + with ArrayDequeOps[A, Stack, Stack[A]] + with Cloneable[Stack[A]] + with DefaultSerializable { + + def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = + this(ArrayDeque.alloc(initialSize), start = 0, end = 0) + + override def iterableFactory: SeqFactory[Stack] = Stack + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "Stack" + + /** + * Add elements to the top of this stack + * + * @param elem + * @return + */ + def push(elem: A): this.type = prepend(elem) + + /** Push two or more elements onto the stack. The last element + * of the sequence will be on top of the new stack. + * + * @param elems the element sequence. + * @return the stack with the new elements on top. + */ + def push(elem1: A, elem2: A, elems: A*): this.type = { + val k = elems.knownSize + ensureSize(length + (if(k >= 0) k + 2 else 3)) + prepend(elem1).prepend(elem2).pushAll(elems) + } + + /** Push all elements in the given iterable object onto the stack. The + * last element in the iterable object will be on top of the new stack. + * + * @param elems the iterable object. + * @return the stack with the new elements on top. 
+ */ + def pushAll(elems: scala.collection.IterableOnce[A]): this.type = + prependAll(elems match { + case it: scala.collection.Seq[A] => it.view.reverse + case it => IndexedSeq.from(it).view.reverse + }) + + /** + * Removes the top element from this stack and return it + * + * @return + * @throws NoSuchElementException when stack is empty + */ + def pop(): A = removeHead() + + /** + * Pop all elements from this stack and return it + * + * @return The removed elements + */ + def popAll(): scala.collection.Seq[A] = removeAll() + + /** + * Returns and removes all elements from the top of this stack which satisfy the given predicate + * + * @param f the predicate used for choosing elements + * @return The removed elements + */ + def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f) + + /** Returns the top element of the stack. This method will not remove + * the element from the stack. An error is signaled if there is no + * element on the stack. + * + * @throws NoSuchElementException + * @return the top element + */ + @`inline` final def top: A = head + + override protected def klone(): Stack[A] = { + val bf = newSpecificBuilder + bf ++= this + bf.result() + } + + override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] = + new Stack(array, start = 0, end) + +} + +/** + * $factoryInfo + * @define coll stack + * @define Coll `Stack` + */ +@SerialVersionUID(3L) +object Stack extends StrictOptimizedSeqFactory[Stack] { + + def from[A](source: IterableOnce[A]): Stack[A] = empty ++= source + + def empty[A]: Stack[A] = new Stack + + def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index c7859214821d..1d8b9563e917 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala 
@@ -14,7 +14,6 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, IterableOnce} import scala.collection.immutable.WrappedString -import language.experimental.captureChecking import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -82,7 +81,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr // Methods required to make this an IndexedSeq: def apply(i: Int): Char = underlying.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): StringBuilder = + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): StringBuilder = new StringBuilder() appendAll coll override protected def newSpecificBuilder: Builder[Char, StringBuilder] = @@ -185,7 +184,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @param xs the characters to be appended. * @return this StringBuilder. */ - def appendAll(xs: IterableOnce[Char]^): this.type = { + def appendAll(xs: IterableOnce[Char]): this.type = { xs match { case x: WrappedString => underlying append x.unwrap case x: ArraySeq.ofChar => underlying append x.array @@ -314,7 +313,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @return this StringBuilder. * @throws StringIndexOutOfBoundsException if the index is out of bounds. */ - def insertAll(index: Int, xs: IterableOnce[Char]^): this.type = + def insertAll(index: Int, xs: IterableOnce[Char]): this.type = insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) /** Inserts the given Array[Char] into this sequence at the given index. 
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala new file mode 100644 index 000000000000..1af968a08ac3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala @@ -0,0 +1,257 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} + +/** + * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam K the type of the keys contained in this tree map. + * @tparam V the type of the values associated with the keys. + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) + extends AbstractMap[K, V] + with SortedMap[K, V] + with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]] + with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]] + with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map] + with DefaultSerializable { + + override def sortedMapFactory = TreeMap + + /** + * Creates an empty `TreeMap`. + * @param ord the implicit ordering used to compare objects of type `K`. + * @return an empty `TreeMap`. 
+ */ + def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord) + + def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree) + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, None) + } + + override def valuesIterator: Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, None) + } + + def keysIteratorFrom(start: K): Iterator[K] = { + if (isEmpty) Iterator.empty + else RB.keysIterator(tree, Some(start)) + } + + def iteratorFrom(start: K): Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else RB.iterator(tree, Some(start)) + } + + override def valuesIteratorFrom(start: K): Iterator[V] = { + if (isEmpty) Iterator.empty + else RB.valuesIterator(tree, Some(start)) + } + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit = + shape.parUnbox( + scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]]( + size, tree.root, _.left, _.right, x => (x.key, x.value) + ) + ) + + override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[K, V] + val s = shape.shape 
match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this } + + def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this } + + override def clear(): Unit = RB.clear(tree) + + def get(key: K): Option[V] = RB.get(tree, key) + + /** + * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and + * vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. 
+ */ + def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until) + + override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f) + override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f) + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def contains(key: K): Boolean = RB.contains(tree, key) + + override def head: (K, V) = RB.min(tree).get + + override def last: (K, V) = RB.max(tree).get + + override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) + + override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) + + override protected[this] def className: String = "TreeMap" + + + /** + * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa. + * + * Only entries with keys between this projection's key range will ever appear as elements of this map, independently + * of whether the entries are added through the original map or through this view. That means that if one inserts a + * key-value in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the + * newly added entry. Mutations are always reflected in the original map, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). 
+ */ + private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: K): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = + new TreeMapProjection(pickLowerBound(from), pickUpperBound(until)) + + override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None + + override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until) + override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until) + override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until) + override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until) + override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until) + override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until) + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if 
(RB.size(tree) == 0) 0 else -1 + override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext + override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def head = headOption.get + override def headOption = { + val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree) + (entry, until) match { + case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None + case _ => entry + } + } + + override def last = lastOption.get + override def lastOption = { + val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree) + (entry, from) match { + case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None + case _ => entry + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized + // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. 
+ override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f) + + override def clone() = super.clone().rangeImpl(from, until) + } + +} + +/** + * $factoryInfo + * + * @define Coll mutable.TreeMap + * @define coll mutable tree map + */ +@SerialVersionUID(3L) +object TreeMap extends SortedMapFactory[TreeMap] { + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = + Growable.from(empty[K, V], it) + + def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + + def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + +} diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala new file mode 100644 index 000000000000..bed474dc02a3 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -0,0 +1,218 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection.mutable + +import scala.collection.Stepper.EfficientSplit +import scala.collection.generic.DefaultSerializable +import scala.collection.mutable.{RedBlackTree => RB} +import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} + +/** + * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. + * + * @param ordering the implicit ordering used to compare objects of type `A`. + * @tparam A the type of the keys contained in this tree set. 
+ * + * @define Coll mutable.TreeSet + * @define coll mutable tree set + */ +// Original API designed in part by Lucien Pereira +sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) + extends AbstractSet[A] + with SortedSet[A] + with SortedSetOps[A, TreeSet, TreeSet[A]] + with StrictOptimizedIterableOps[A, Set, TreeSet[A]] + with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]] + with SortedSetFactoryDefaults[A, TreeSet, Set] + with DefaultSerializable { + + if (ordering eq null) + throw new NullPointerException("ordering must not be null") + + /** + * Creates an empty `TreeSet`. + * @param ord the implicit ordering used to compare objects of type `A`. + * @return an empty `TreeSet`. + */ + def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord) + + override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet + + def iterator: collection.Iterator[A] = RB.keysIterator(tree) + + def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start)) + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import scala.collection.convert.impl._ + type T = RB.Node[A, Null] + val s = shape.shape match { + case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int]) + case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long]) + case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double]) + case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key)) + } + s.asInstanceOf[S with EfficientSplit] + } + + def addOne(elem: A): this.type = { + RB.insert(tree, elem, null) + this + } + + def subtractOne(elem: A): this.type = { + RB.delete(tree, elem) + this + } + + def clear(): Unit = RB.clear(tree) + + def 
contains(elem: A): Boolean = RB.contains(tree, elem) + + def unconstrained: collection.Set[A] = this + + def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until) + + override protected[this] def className: String = "TreeSet" + + override def size: Int = RB.size(tree) + override def knownSize: Int = size + override def isEmpty: Boolean = RB.isEmpty(tree) + + override def head: A = RB.minKey(tree).get + + override def last: A = RB.maxKey(tree).get + + override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key) + + override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key) + + override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f) + + + /** + * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa. + * + * Only keys between this projection's key range will ever appear as elements of this set, independently of whether + * the elements are added through the original set or through this view. That means that if one inserts an element in + * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element. + * Mutations are always reflected in the original set, though. + * + * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower + * bound. + * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper + * bound. + */ + private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) { + + /** + * Given a possible new lower bound, chooses and returns the most constraining one (the maximum). 
+ */ + private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match { + case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr)) + case (None, _) => newFrom + case _ => from + } + + /** + * Given a possible new upper bound, chooses and returns the most constraining one (the minimum). + */ + private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match { + case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt)) + case (None, _) => newUntil + case _ => until + } + + /** + * Returns true if the argument is inside the view bounds (between `from` and `until`). + */ + private[this] def isInsideViewBounds(key: A): Boolean = { + val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0 + val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0 + afterFrom && beforeUntil + } + + override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = + new TreeSetProjection(pickLowerBound(from), pickUpperBound(until)) + + override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key) + + override def iterator = RB.keysIterator(tree, from, until) + override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until) + + override def size = if (RB.size(tree) == 0) 0 else iterator.length + override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1 + override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext + + override def head: A = headOption.get + override def headOption: Option[A] = { + val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree) + (elem, until) match { + case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None + case _ => elem + } + } + + override def last: A = lastOption.get + override def lastOption = { + val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree) + (elem, from) match { + case (Some(e), Some(fr)) if ordering.compare(e, fr) 
< 0 => None + case _ => elem + } + } + + // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized + // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See + // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this. + override def foreach[U](f: A => U): Unit = iterator.foreach(f) + + override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until) + + } + +} + +/** + * $factoryInfo + * @define Coll `mutable.TreeSet` + * @define coll mutable tree set + */ +@SerialVersionUID(3L) +object TreeSet extends SortedIterableFactory[TreeSet] { + + def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + + def from[E](it: IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + it match { + case ts: TreeSet[E] if ordering == ts.ordering => + new TreeSet[E](ts.tree.treeCopy()) + case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => + new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size)) + case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) => + val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator + new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)) + case _ => + val t: RB.Tree[E, Null] = RB.Tree.empty + val i = it.iterator + while (i.hasNext) RB.insert(t, i.next(), null) + new TreeSet[E](t) + } + + def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty + def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } + def result(): TreeSet[A] = new TreeSet[A](tree) + def clear(): Unit = { tree = RB.Tree.empty } + } +} diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala new file mode 100644 index 000000000000..489f2a1b0387 --- 
/dev/null +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -0,0 +1,442 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.annotation.tailrec +import scala.collection.generic.DefaultSerializable +import scala.reflect.ClassTag +import scala.collection.immutable.Nil + +/** A buffer that stores elements in an unrolled linked list. + * + * Unrolled linked lists store elements in linked fixed size + * arrays. + * + * Unrolled buffers retain locality and low memory overhead + * properties of array buffers, but offer much more efficient + * element addition, since they never reallocate and copy the + * internal array. + * + * However, they provide `O(n/m)` complexity random access, + * where `n` is the number of elements, and `m` the size of + * internal array chunks. + * + * Ideal to use when: + * - elements are added to the buffer and then all of the + * elements are traversed sequentially + * - two unrolled buffers need to be concatenated (see `concat`) + * + * Better than singly linked lists for random access, but + * should still be avoided for such a purpose. 
+ * + * @define coll unrolled buffer + * @define Coll `UnrolledBuffer` + * + */ +@SerialVersionUID(3L) +sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) + extends AbstractBuffer[T] + with Buffer[T] + with Seq[T] + with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]] + with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag] + with Builder[T, UnrolledBuffer[T]] + with DefaultSerializable { + + import UnrolledBuffer.Unrolled + + @transient private var headptr = newUnrolled + @transient private var lastptr = headptr + @transient private var sz = 0 + + private[collection] def headPtr = headptr + private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head + private[collection] def lastPtr = lastptr + private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last + private[collection] def size_=(s: Int) = sz = s + + protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer + protected def iterableEvidence: ClassTag[T] = tag + + override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged + + protected def newUnrolled = new Unrolled[T](this) + + // The below would allow more flexible behavior without requiring inheritance + // that is risky because all the important internals are private. + // private var myLengthPolicy: Int => Int = x => x + // + // /** Specifies how the array lengths should vary. + // * + // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length + // * policy can be given that changes this scheme to, for instance, an + // * exponential growth. 
+ // * + // * @param nextLength computes the length of the next array from the length of the latest one + // */ + // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength } + private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz) + + def classTagCompanion = UnrolledBuffer + + /** Concatenates the target unrolled buffer to this unrolled buffer. + * + * The specified buffer `that` is cleared after this operation. This is + * an O(1) operation. + * + * @param that the unrolled buffer whose elements are added to this buffer + */ + def concat(that: UnrolledBuffer[T]) = { + // bind the two together + if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr + + // update size + sz += that.sz + + // `that` is no longer usable, so clear it + // here we rely on the fact that `clear` allocates + // new nodes instead of modifying the previous ones + that.clear() + + // return a reference to this + this + } + + def addOne(elem: T) = { + lastptr = lastptr.append(elem) + sz += 1 + this + } + + def clear(): Unit = { + headptr = newUnrolled + lastptr = headptr + sz = 0 + } + + def iterator: Iterator[T] = new AbstractIterator[T] { + var pos: Int = -1 + var node: Unrolled[T] = headptr + scan() + + private def scan(): Unit = { + pos += 1 + while (pos >= node.size) { + pos = 0 + node = node.next + if (node eq null) return + } + } + def hasNext = node ne null + def next() = if (hasNext) { + val r = node.array(pos) + scan() + r + } else Iterator.empty.next() + } + + // this should be faster than the iterator + override def foreach[U](f: T => U) = headptr.foreach(f) + + def result() = this + + def length = sz + + override def knownSize: Int = sz + + def apply(idx: Int) = + if (idx >= 0 && idx < sz) headptr(idx) + else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + def update(idx: Int, newelem: T) = + if (idx >= 0 && idx < sz) headptr(idx) = newelem + else throw new IndexOutOfBoundsException(s"$idx is 
out of bounds (min 0, max ${sz-1})") + + def mapInPlace(f: T => T): this.type = { + headptr.mapInPlace(f) + this + } + + def remove(idx: Int) = + if (idx >= 0 && idx < sz) { + sz -= 1 + headptr.remove(idx, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + @tailrec final def remove(idx: Int, count: Int): Unit = + if (count > 0) { + remove(idx) + remove(idx, count-1) + } + + def prepend(elem: T) = { + headptr = headptr prepend elem + sz += 1 + this + } + + def insert(idx: Int, elem: T): Unit = + insertAll(idx, elem :: Nil) + + def insertAll(idx: Int, elems: IterableOnce[T]): Unit = + if (idx >= 0 && idx <= sz) { + sz += headptr.insertAll(idx, elems, this) + } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") + + override def subtractOne(elem: T): this.type = { + if (headptr.subtractOne(elem, this)) { + sz -= 1 + } + this + } + + def patchInPlace(from: Int, patch: collection.IterableOnce[T], replaced: Int): this.type = { + remove(from, replaced) + insertAll(from, patch) + this + } + + private def writeObject(out: java.io.ObjectOutputStream): Unit = { + out.defaultWriteObject + out writeInt sz + for (elem <- this) out writeObject elem + } + + private def readObject(in: java.io.ObjectInputStream): Unit = { + in.defaultReadObject + + val num = in.readInt + + headPtr = newUnrolled + lastPtr = headPtr + sz = 0 + var i = 0 + while (i < num) { + this += in.readObject.asInstanceOf[T] + i += 1 + } + } + + override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this + + override protected[this] def className = "UnrolledBuffer" +} + + +@SerialVersionUID(3L) +object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self => + + val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) + + def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + def from[A : ClassTag](source: scala.collection.IterableOnce[A]): 
UnrolledBuffer[A] = newBuilder[A].addAll(source) + + def newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + + final val waterline: Int = 50 + + final def waterlineDenom: Int = 100 + + @deprecated("Use waterlineDenom instead.", "2.13.0") + final val waterlineDelim: Int = waterlineDenom + + private[collection] val unrolledlength = 32 + + /** Unrolled buffer node. + */ + class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) + private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) + + private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length) + + // adds and returns itself or the new unrolled if full + @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) { + array(size) = elem + size += 1 + this + } else { + next = new Unrolled[T](0, new Array[T](nextlength), null, buff) + next append elem + } + def foreach[U](f: T => U): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + def mapInPlace(f: T => T): Unit = { + var unrolled = this + var i = 0 + while (unrolled ne null) { + val chunkarr = unrolled.array + val chunksz = unrolled.size + while (i < chunksz) { + val elem = chunkarr(i) + chunkarr(i) = f(elem) + i += 1 + } + i = 0 + unrolled = unrolled.next + } + } + @tailrec final def apply(idx: Int): T = + if (idx < size) array(idx) else next.apply(idx - size) + @tailrec final def update(idx: Int, newelem: T): Unit = + if (idx < size) array(idx) = newelem else next.update(idx - size, newelem) + @tailrec final def locate(idx: Int): Unrolled[T] = + if (idx < size) this else 
next.locate(idx - size) + def prepend(elem: T) = if (size < array.length) { + // shift the elements of the array right + // then insert the element + shiftright() + array(0) = elem + size += 1 + this + } else { + // allocate a new node and store element + // then make it point to this + val newhead = new Unrolled[T](buff) + newhead append elem + newhead.next = this + newhead + } + // shifts right assuming enough space + private def shiftright(): Unit = { + var i = size - 1 + while (i >= 0) { + array(i + 1) = array(i) + i -= 1 + } + } + // returns pointer to new last if changed + @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T = + if (idx < size) { + // remove the element + // then try to merge with the next bucket + val r = array(idx) + shiftleft(idx) + size -= 1 + if (tryMergeWithNext()) buffer.lastPtr = this + r + } else next.remove(idx - size, buffer) + + @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = { + var i = 0 + while (i < size) { + if(array(i) == elem) { + remove(i, buffer) + return true + } + i += 1 + } + if(next ne null) next.subtractOne(elem, buffer) else false + } + + // shifts left elements after `leftb` (overwrites `leftb`) + private def shiftleft(leftb: Int): Unit = { + var i = leftb + while (i < (size - 1)) { + array(i) = array(i + 1) + i += 1 + } + nullout(i, i + 1) + } + protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) { + // copy the next array, then discard the next node + Array.copy(next.array, 0, array, size, next.size) + size = size + next.size + next = next.next + if (next eq null) true else false // checks if last node was thrown out + } else false + + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T], buffer: UnrolledBuffer[T]): Int = { + if (idx < size) { + // divide this node at the appropriate position and insert all into head + // update new next + val newnextnode = new Unrolled[T](0, new 
Array(array.length), null, buff) + Array.copy(array, idx, newnextnode.array, 0, size - idx) + newnextnode.size = size - idx + newnextnode.next = next + + // update this + nullout(idx, size) + size = idx + next = null + + // insert everything from iterable to this + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + curr.next = newnextnode + + // try to merge the last node of this with the newnextnode and fix tail pointer if needed + if (curr.tryMergeWithNext()) buffer.lastPtr = curr + else if (newnextnode.next eq null) buffer.lastPtr = newnextnode + appended + } + else if (idx == size || (next eq null)) { + var curr = this + var appended = 0 + for (elem <- t.iterator) { + curr = curr append elem + appended += 1 + } + appended + } + else next.insertAll(idx - size, t, buffer) + } + + private def nullout(from: Int, until: Int): Unit = { + var idx = from + while (idx < until) { + array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!! 
+ idx += 1 + } + } + + // assumes this is the last node + // `thathead` and `thatlast` are head and last node + // of the other unrolled list, respectively + def bind(thathead: Unrolled[T]) = { + assert(next eq null) + next = thathead + tryMergeWithNext() + } + + override def toString: String = + array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "") + } +} + +// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: +// Todo -- revisit whether inheritance is the best way to achieve this functionality +private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { + override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz + override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) +} diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala new file mode 100644 index 000000000000..7286a318e1f9 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ -0,0 +1,55 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +import scala.annotation.nowarn +import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} + +/** A hash map with references to entries which are weakly reachable. Entries are + * removed from this map when the key is no longer (strongly) referenced. This class wraps + * `java.util.WeakHashMap`. 
+ * + * @tparam K type of keys contained in this map + * @tparam V type of values associated with the keys + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] + * section on `Weak Hash Maps` for more information. + * + * @define Coll `WeakHashMap` + * @define coll weak hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) + with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] + with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { + override def empty = new WeakHashMap[K, V] + override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix = "WeakHashMap" +} + +/** $factoryInfo + * @define Coll `WeakHashMap` + * @define coll weak hash map + */ +@SerialVersionUID(3L) +object WeakHashMap extends MapFactory[WeakHashMap] { + def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[K, V](it: collection.IterableOnce[(K, V)]): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) +} + diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala new file mode 100644 index 000000000000..4915e8a48b22 --- /dev/null +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -0,0 +1,41 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection + + +package object mutable { + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + type WrappedArray[X] = ArraySeq[X] + @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0") + val WrappedArray = ArraySeq + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + type ArrayStack[X] = Stack[X] + @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0") + val ArrayStack = Stack + + @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0") + type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X] + + @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0") + type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To] + + @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0") + type IndexedOptimizedSeq[A] = IndexedSeq[A] + + @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0") + type IndexedOptimizedBuffer[A] = IndexedBuffer[A] +} diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala new file mode 100644 index 000000000000..954573ff1ddd --- /dev/null +++ b/tests/pos-special/stdlib/collection/package.scala @@ -0,0 +1,80 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala + +package object collection { + @deprecated("Use Iterable instead of Traversable", "2.13.0") + type Traversable[+X] = Iterable[X] + @deprecated("Use Iterable instead of Traversable", "2.13.0") + val Traversable = Iterable + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + type TraversableOnce[+X] = IterableOnce[X] + @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0") + val TraversableOnce = IterableOnce + @deprecated("Use SeqOps instead of SeqLike", "2.13.0") + type SeqLike[A, T] = SeqOps[A, Seq, T] + @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0") + type ArrayLike[A] = SeqOps[A, Seq, Seq[A]] + + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversableOnce[+X] = IterableOnce[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversableOnce = IterableOnce + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenTraversable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenTraversable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenIterable[+X] = Iterable[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenIterable = Iterable + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSeq[+X] = Seq[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSeq = Seq + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenSet[X] = Set[X] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenSet = Set + @deprecated("Gen* collection types have been removed", "2.13.0") + type GenMap[K, +V] = Map[K, V] + @deprecated("Gen* collection types have been removed", "2.13.0") + val GenMap = Map + + /** Needed to circumvent a difficulty between dotty and scalac concerning + * the right 
top type for a type parameter of kind * -> *. + * In Scalac, we can provide `Any`, as `Any` is kind-polymorphic. In dotty this is not allowed. + * In dotty, we can provide `[X] => Any`. But Scalac does not know lambda syntax. + */ + private[scala] type AnyConstr[X] = Any + + /** An extractor used to head/tail deconstruct sequences. */ + object +: { + /** Splits a sequence into head +: tail. + * @return Some((head, tail)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] = + if(t.isEmpty) None + else Some(t.head -> t.tail) + } + + /** An extractor used to init/last deconstruct sequences. */ + object :+ { + /** Splits a sequence into init :+ last. + * @return Some((init, last)) if sequence is non-empty. None otherwise. + */ + def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] = + if(t.isEmpty) None + else Some(t.init -> t.last) + } +} From b5bfbab2eb699d064850de289c9928a1a221c903 Mon Sep 17 00:00:00 2001 From: odersky Date: Mon, 6 Nov 2023 17:54:30 +0100 Subject: [PATCH 19/19] Change stdlib collections to be compiled under capture checking --- tests/pos-special/stdlib/Test1.scala | 34 ++ tests/pos-special/stdlib/Test2.scala | 232 +++++++ .../stdlib/collection/ArrayOps.scala | 113 ++-- .../stdlib/collection/BitSet.scala | 6 +- .../stdlib/collection/BufferedIterator.scala | 2 +- .../stdlib/collection/BuildFrom.scala | 36 +- .../stdlib/collection/DefaultMap.scala | 2 +- .../stdlib/collection/Factory.scala | 106 ++-- .../stdlib/collection/Hashing.scala | 1 + .../stdlib/collection/IndexedSeq.scala | 9 +- .../stdlib/collection/IndexedSeqView.scala | 85 +-- .../stdlib/collection/Iterable.scala | 157 ++--- .../stdlib/collection/IterableOnce.scala | 145 ++--- .../stdlib/collection/Iterator.scala | 148 ++--- .../stdlib/collection/JavaConverters.scala | 1 + .../stdlib/collection/LazyZipOps.scala | 21 +- 
.../stdlib/collection/LinearSeq.scala | 9 +- tests/pos-special/stdlib/collection/Map.scala | 36 +- .../stdlib/collection/MapView.scala | 69 ++- .../stdlib/collection/Searching.scala | 1 + tests/pos-special/stdlib/collection/Seq.scala | 52 +- .../stdlib/collection/SeqMap.scala | 1 + .../stdlib/collection/SeqView.scala | 105 ++-- tests/pos-special/stdlib/collection/Set.scala | 10 +- .../stdlib/collection/SortedMap.scala | 12 +- .../stdlib/collection/SortedOps.scala | 1 + .../stdlib/collection/SortedSet.scala | 5 +- .../stdlib/collection/Stepper.scala | 40 +- .../stdlib/collection/StepperShape.scala | 3 +- .../StrictOptimizedIterableOps.scala | 24 +- .../collection/StrictOptimizedMapOps.scala | 8 +- .../collection/StrictOptimizedSeqOps.scala | 14 +- .../collection/StrictOptimizedSetOps.scala | 1 + .../StrictOptimizedSortedMapOps.scala | 3 +- .../stdlib/collection/StringOps.scala | 37 +- .../stdlib/collection/StringParsers.scala | 17 +- .../pos-special/stdlib/collection/View.scala | 140 +++-- .../stdlib/collection/WithFilter.scala | 8 +- .../stdlib/collection/concurrent/Map.scala | 1 + .../collection/concurrent/TrieMap.scala | 39 +- .../collection/convert/AsJavaConverters.scala | 1 + .../collection/convert/AsJavaExtensions.scala | 1 + .../convert/AsScalaConverters.scala | 1 + .../convert/AsScalaExtensions.scala | 1 + .../convert/ImplicitConversions.scala | 1 + .../convert/JavaCollectionWrappers.scala | 12 +- .../collection/convert/StreamExtensions.scala | 1 + .../convert/impl/ArrayStepper.scala | 1 + .../convert/impl/BinaryTreeStepper.scala | 3 +- .../convert/impl/BitSetStepper.scala | 9 +- .../convert/impl/ChampStepper.scala | 1 + .../convert/impl/InOrderStepperBase.scala | 1 + .../convert/impl/IndexedSeqStepper.scala | 1 + .../convert/impl/IndexedStepperBase.scala | 1 + .../convert/impl/IteratorStepper.scala | 1 + .../convert/impl/NumericRangeStepper.scala | 1 + .../convert/impl/RangeStepper.scala | 3 +- .../convert/impl/StringStepper.scala | 1 + 
.../convert/impl/TableStepper.scala | 1 + .../convert/impl/VectorStepper.scala | 7 +- .../collection/generic/BitOperations.scala | 1 + .../generic/DefaultSerializationProxy.scala | 5 +- .../collection/generic/IsIterable.scala | 1 + .../collection/generic/IsIterableOnce.scala | 1 + .../stdlib/collection/generic/IsMap.scala | 1 + .../stdlib/collection/generic/IsSeq.scala | 41 +- .../collection/generic/Subtractable.scala | 1 + .../stdlib/collection/generic/package.scala | 1 + .../collection/immutable/ArraySeq.scala | 53 +- .../stdlib/collection/immutable/BitSet.scala | 5 +- .../collection/immutable/ChampCommon.scala | 3 +- .../stdlib/collection/immutable/HashMap.scala | 22 +- .../stdlib/collection/immutable/HashSet.scala | 22 +- .../stdlib/collection/immutable/IntMap.scala | 18 +- .../collection/immutable/Iterable.scala | 4 +- ...{LazyList.scala => LazyListIterable.scala} | 499 ++++++++------- .../stdlib/collection/immutable/List.scala | 25 +- .../stdlib/collection/immutable/ListMap.scala | 12 +- .../stdlib/collection/immutable/ListSet.scala | 4 +- .../stdlib/collection/immutable/LongMap.scala | 16 +- .../stdlib/collection/immutable/Map.scala | 32 +- .../collection/immutable/NumericRange.scala | 4 +- .../stdlib/collection/immutable/Queue.scala | 7 +- .../stdlib/collection/immutable/Range.scala | 5 +- .../collection/immutable/RedBlackTree.scala | 7 +- .../stdlib/collection/immutable/Seq.scala | 12 +- .../stdlib/collection/immutable/SeqMap.scala | 10 +- .../stdlib/collection/immutable/Set.scala | 12 +- .../collection/immutable/SortedMap.scala | 13 +- .../collection/immutable/SortedSet.scala | 3 +- .../stdlib/collection/immutable/Stream.scala | 568 ------------------ .../immutable/StrictOptimizedSeqOps.scala | 8 +- .../stdlib/collection/immutable/TreeMap.scala | 16 +- .../collection/immutable/TreeSeqMap.scala | 26 +- .../stdlib/collection/immutable/TreeSet.scala | 9 +- .../stdlib/collection/immutable/Vector.scala | 54 +- .../collection/immutable/VectorMap.scala | 8 +- 
.../collection/immutable/WrappedString.scala | 18 +- .../stdlib/collection/immutable/package.scala | 2 +- .../stdlib/collection/mutable/AnyRefMap.scala | 60 +- .../collection/mutable/ArrayBuffer.scala | 35 +- .../collection/mutable/ArrayBuilder.scala | 7 +- .../collection/mutable/ArrayDeque.scala | 19 +- .../stdlib/collection/mutable/ArraySeq.scala | 26 +- .../stdlib/collection/mutable/BitSet.scala | 9 +- .../stdlib/collection/mutable/Buffer.scala | 22 +- .../stdlib/collection/mutable/Builder.scala | 14 +- .../mutable/CheckedIndexedSeqView.scala | 63 +- .../stdlib/collection/mutable/Cloneable.scala | 2 +- .../mutable/CollisionProofHashMap.scala | 35 +- .../stdlib/collection/mutable/Growable.scala | 8 +- .../collection/mutable/GrowableBuilder.scala | 4 +- .../stdlib/collection/mutable/HashMap.scala | 19 +- .../stdlib/collection/mutable/HashSet.scala | 19 +- .../stdlib/collection/mutable/HashTable.scala | 5 +- .../collection/mutable/ImmutableBuilder.scala | 1 + .../collection/mutable/IndexedSeq.scala | 1 + .../stdlib/collection/mutable/Iterable.scala | 5 +- .../collection/mutable/LinkedHashMap.scala | 11 +- .../collection/mutable/LinkedHashSet.scala | 11 +- .../collection/mutable/ListBuffer.scala | 20 +- .../stdlib/collection/mutable/ListMap.scala | 9 +- .../stdlib/collection/mutable/LongMap.scala | 51 +- .../stdlib/collection/mutable/Map.scala | 13 +- .../stdlib/collection/mutable/MultiMap.scala | 3 +- .../collection/mutable/MutationTracker.scala | 3 +- .../collection/mutable/OpenHashMap.scala | 11 +- .../collection/mutable/PriorityQueue.scala | 17 +- .../stdlib/collection/mutable/Queue.scala | 9 +- .../collection/mutable/RedBlackTree.scala | 33 +- .../collection/mutable/ReusableBuilder.scala | 1 + .../stdlib/collection/mutable/Seq.scala | 1 + .../stdlib/collection/mutable/SeqMap.scala | 1 + .../stdlib/collection/mutable/Set.scala | 1 + .../collection/mutable/Shrinkable.scala | 5 +- .../stdlib/collection/mutable/SortedMap.scala | 9 +- 
.../stdlib/collection/mutable/SortedSet.scala | 1 + .../stdlib/collection/mutable/Stack.scala | 10 +- .../collection/mutable/StringBuilder.scala | 9 +- .../stdlib/collection/mutable/TreeMap.scala | 9 +- .../stdlib/collection/mutable/TreeSet.scala | 9 +- .../collection/mutable/UnrolledBuffer.scala | 19 +- .../collection/mutable/WeakHashMap.scala | 9 +- .../stdlib/collection/mutable/package.scala | 1 + .../stdlib/collection/package.scala | 1 + 145 files changed, 1983 insertions(+), 1980 deletions(-) create mode 100644 tests/pos-special/stdlib/Test1.scala create mode 100644 tests/pos-special/stdlib/Test2.scala rename tests/pos-special/stdlib/collection/immutable/{LazyList.scala => LazyListIterable.scala} (66%) delete mode 100644 tests/pos-special/stdlib/collection/immutable/Stream.scala diff --git a/tests/pos-special/stdlib/Test1.scala b/tests/pos-special/stdlib/Test1.scala new file mode 100644 index 000000000000..9ee4e7cfa6a1 --- /dev/null +++ b/tests/pos-special/stdlib/Test1.scala @@ -0,0 +1,34 @@ +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +import java.io.* + +object Test0: + + def usingLogFile[sealed T](op: FileOutputStream^ => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result + + def test(xs: List[Int]) = + usingLogFile: f => + xs.map: x => + f.write(x) + x * x + +object Test1: + def test(it: Iterator[Int]^, v: View[Int]^) = + val isEven: Int ->{cap[test]} Boolean = _ % 2 == 0 + val it2 = it.filter(isEven) + val _: Iterator[Int]^{it, isEven} = it2 + val it2c: Iterator[Int]^{it2} = it2 + val v2 = v.filter(isEven) + val _: View[Int]^{v, isEven} = v2 + val v2c: View[Int]^{v2} = v2 + val v3 = v.drop(2) + val _: View[Int]^{v} = v3 + val v3c: View[Int]^{v3} = v3 + val (xs6, xs7) = v.partition(isEven) + val (xs6a, xs7a) = v.partition(_ % 2 == 0) diff --git a/tests/pos-special/stdlib/Test2.scala b/tests/pos-special/stdlib/Test2.scala 
new file mode 100644 index 000000000000..a59da522b183 --- /dev/null +++ b/tests/pos-special/stdlib/Test2.scala @@ -0,0 +1,232 @@ +import scala.reflect.ClassTag +import language.experimental.captureChecking +import collection.{View, Seq} +import collection.mutable.{ArrayBuffer, ListBuffer} + +object Test { + + def seqOps(xs: Seq[Int]) = { // try with Seq[Int]^{cap} + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int => Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x3 = xs.indexWhere(isEven) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Seq[Int] = xs6 + val ys7: Seq[Int] = xs7 + val xs8 = xs.drop(2) + val ys8: Seq[Int] = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Seq[Boolean] = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Seq[Int] = xs10 + val xs11 = xs ++ xs + val ys11: Seq[Int] = xs11 + val xs12 = xs ++ Nil + val ys12: Seq[Int] = xs12 + val xs13 = Nil ++ xs + val ys13: Seq[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Int, Boolean)] = xs15 + val xs16 = xs.reverse + val ys16: Seq[Int] = xs16 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs10) + println(xs11) + println(xs12) + println(xs13) + println(xs14) + println(xs15) + println(xs16) + } + + def iterOps(xs: => Iterator[Int]^) = + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int ->{cap[iterOps]} Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: 
Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + val x4 = xs.next() + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: Iterator[Int]^{xs6, isEven} = xs6 + val ys7: Iterator[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: Iterator[Int]^{xs6} = xs6 + val ys7a: Iterator[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: Iterator[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: Iterator[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: Iterator[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: Iterator[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: Iterator[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: Iterator[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: Iterator[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + + def viewOps(xs: View[Int]^) = { + val strPlusInt: (String, Int) => String = _ + _ + val intPlusStr: (Int, String) => String = _ + _ + val isEven: Int ->{cap[viewOps]} Boolean = _ % 2 == 0 + val isNonNeg: Int => Boolean = _ > 0 + val flips: Int => List[Int] = x => x :: -x :: Nil + val x1 = xs.foldLeft("")(strPlusInt) + val y1: String = x1 + val x2 = xs.foldRight("")(intPlusStr) + val y2: String = x2 + //val x3 = xs.indexWhere(_ % 2 == 0) // indexWhere does not exist on View + //val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Int] = x5 + val (xs6, xs7) = xs.partition(isEven) + val ys6: View[Int]^{xs6, isEven} = xs6 + val ys7: 
View[Int]^{xs7, isEven} = xs7 + val (xs6a, xs7a) = xs.partition(_ % 2 == 0) + val ys6a: View[Int]^{xs6} = xs6 + val ys7a: View[Int]^{xs7} = xs7 + val xs8 = xs.drop(2) + val ys8: View[Int]^{xs8} = xs8 + val xs9 = xs.map(isNonNeg) + val ys9: View[Boolean]^{xs9} = xs9 + val xs10 = xs.flatMap(flips) + val ys10: View[Int]^{xs10} = xs10 + val xs11 = xs ++ xs + val ys11: View[Int]^{xs11} = xs11 + val xs12 = xs ++ Nil + val ys12: View[Int]^{xs12} = xs12 + val xs13 = Nil ++ xs + val ys13: List[Int] = xs13 + val xs14 = xs ++ ("a" :: Nil) + val ys14: View[Any]^{xs14} = xs14 + val xs15 = xs.zip(xs9) + val ys15: View[(Int, Boolean)]^{xs15} = xs15 + println("-------") + println(x1) + println(x2) + println(x4) + println(x5) + println(xs6.to(List)) + println(xs7.to(List)) + println(xs8.to(List)) + println(xs9.to(List)) + println(xs10.to(List)) + println(xs11.to(List)) + println(xs12.to(List)) + println(xs13.to(List)) + println(xs14.to(List)) + println(xs15.to(List)) + } + + def stringOps(xs: String) = { + val x1 = xs.foldLeft("")(_ + _) + val y1: String = x1 + val x2 = xs.foldRight("")(_ + _) + val y2: String = x2 + val x3 = xs.indexWhere(_ % 2 == 0) + val y3: Int = x3 + val x4 = xs.head + val y4: Int = x4 + val x5 = xs.to(List) + val y5: List[Char] = x5 + val (xs6, xs7) = xs.partition(_ % 2 == 0) + val ys6: String = xs6 + val ys7: String = xs7 + val xs8 = xs.drop(2) + val ys8: String = xs8 + val xs9 = xs.map(_ + 1) + val ys9: Seq[Int] = xs9 + val xs9a = xs.map(_.toUpper) + val ys9a: String = xs9a + val xs10 = xs.flatMap((x: Char) => s"$x,$x") + val ys10: String = xs10 + val xs11 = xs ++ xs + val ys11: String = xs11 + val ops = collection.StringOps(xs) // !!! 
otherwise we can a "cannot establish reference" + val xs13 = Nil ++ ops.iterator + val ys13: List[Char] = xs13 + val xs14 = xs ++ ("xyz" :: Nil) + val ys14: Seq[Any] = xs14 + val xs15 = xs.zip(xs9) + val ys15: Seq[(Char, Int)] = xs15 + println("-------") + println(x1) + println(x2) + println(x3) + println(x4) + println(x5) + println(xs6) + println(xs7) + println(xs8) + println(xs9) + println(xs9a) + println(xs10) + println(xs11) + println(xs13) + println(xs14) + println(xs15) + } + + def main(args: Array[String]) = { + val ints = List(1, 2, 3) + val intsBuf = ints.to(ArrayBuffer) + val intsListBuf = ints.to(ListBuffer) + val intsView = ints.view + seqOps(ints) + seqOps(intsBuf) + seqOps(intsListBuf) + viewOps(intsView) + iterOps(ints.iterator) + stringOps("abc") + } +} diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala index 485427886625..a52fd0dbd162 100644 --- a/tests/pos-special/stdlib/collection/ArrayOps.scala +++ b/tests/pos-special/stdlib/collection/ArrayOps.scala @@ -15,6 +15,7 @@ package collection import java.lang.Math.{max, min} import java.util.Arrays +import language.experimental.captureChecking import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -53,14 +54,14 @@ import scala.util.Sorting object ArrayOps { @SerialVersionUID(3L) - private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] { def length = xs.length def apply(n: Int) = xs(n) override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") } /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. 
*/ - class WithFilter[A](p: A => Boolean, xs: Array[A]) { + class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) { /** Apply `f` to each element for its side effects. * Note: [U] parameter needed to help scalac's type inference. @@ -82,7 +83,7 @@ object ArrayOps { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B: ClassTag](f: A => B): Array[B] = { + def map[sealed B: ClassTag](f: A => B): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while (i < xs.length) { @@ -101,7 +102,7 @@ object ArrayOps { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. */ - def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -112,15 +113,15 @@ object ArrayOps { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ - def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs) } @SerialVersionUID(3L) - private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = 0 private[this] val len = xs.length override def knownSize: Int = len - pos @@ -143,7 +144,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = xs.length-1 def hasNext: Boolean = pos >= 0 def next(): A = { @@ -160,7 +161,7 @@ object ArrayOps { } @SerialVersionUID(3L) - private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { private[this] var pos = 0 def hasNext: Boolean = pos < xs.length def next(): Array[A] = { @@ -196,7 +197,7 @@ object ArrayOps { * * @tparam A type of the elements contained in this array. 
*/ -final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { +final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal { @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) @@ -366,7 +367,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} = Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) /** An array containing the first `n` elements of this array. */ @@ -504,7 +505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. */ - def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { val res1 = ArrayBuilder.make[A1] val res2 = ArrayBuilder.make[A2] var i = 0 @@ -663,7 +664,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * All these operations apply to those elements of this array * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs) /** Finds index of first occurrence of some value in this array after or at some start index. * @@ -776,7 +777,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. 
*/ def foldLeft[B](z: B)(op: (B, A) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { val length = xs.length var v: Any = z var i = 0 @@ -815,7 +816,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = { var v = z var i = 0 val res = new Array[B](xs.length + 1) @@ -838,7 +839,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * * @return a new array containing the prefix scan of the elements in this array */ - def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) /** Produces an array containing cumulative results of applying the binary * operator going right to left. @@ -854,7 +855,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * }}} * */ - def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = { var v = z var i = xs.length - 1 val res = new Array[B](xs.length + 1) @@ -882,7 +883,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Returns `z` if this array is empty. 
*/ def foldRight[B](z: B)(op: (A, B) => B): B = { - def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { var v = z var i = xs.length - 1 while(i >= 0) { @@ -925,7 +926,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. */ - def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { val len = xs.length val ys = new Array[B](len) if(len > 0) { @@ -962,7 +963,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. */ - def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] var i = 0 while(i < xs.length) { @@ -972,7 +973,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { b.result() } - def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = flatMap[B](x => asIterable(f(x))) /** Flattens a two-dimensional array by concatenating all its rows @@ -982,7 +983,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. * @return An array obtained by concatenating rows of this array. 
*/ - def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { val b = ArrayBuilder.make[B] val len = xs.length var size = 0 @@ -1015,7 +1016,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { val fallback: Any => Any = ArrayOps.fallback val b = ArrayBuilder.make[B] var i = 0 @@ -1049,7 +1050,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array containing pairs consisting of corresponding elements of this array and `that`. * The length of the returned array is the minimum of the lengths of this array and `that`. */ - def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = { val b = new ArrayBuilder.ofRef[(A, B)]() val k = that.knownSize b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) @@ -1094,7 +1095,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * If this array is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this array, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { val b = new ArrayBuilder.ofRef[(A1, B)]() val k = that.knownSize b.sizeHint(max(k, xs.length)) @@ -1131,26 +1132,26 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with an element appended. 
*/ - def appended[B >: A : ClassTag](x: B): Array[B] = { + def appended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+1) dest(xs.length) = x dest } - @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x) /** A copy of this array with an element prepended. */ - def prepended[B >: A : ClassTag](x: B): Array[B] = { + def prepended[sealed B >: A : ClassTag](x: B): Array[B] = { val dest = new Array[B](xs.length + 1) dest(0) = x Array.copy(xs, 0, dest, 1, xs.length) dest } - @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x) /** A copy of this array with all elements of a collection prepended. */ - def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = prefix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1161,18 +1162,18 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array prepended. 
*/ - def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](prefix, prefix.length+xs.length) Array.copy(xs, 0, dest, prefix.length, xs.length) dest } - @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) - @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) /** A copy of this array with all elements of a collection appended. */ - def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { val b = ArrayBuilder.make[B] val k = suffix.knownSize if(k >= 0) b.sizeHint(k + xs.length) @@ -1182,23 +1183,23 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** A copy of this array with all elements of an array appended. 
*/ - def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { val dest = Array.copyAs[B](xs, xs.length+suffix.length) Array.copy(suffix, 0, dest, xs.length, suffix.length) dest } - @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) - @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) - @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) - @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) /** Tests whether this array contains a given value as an element. * @@ -1217,7 +1218,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param other The patch values * @param replaced The number of values in the original array that are replaced by the patch. 
*/ - def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { val b = ArrayBuilder.make[B] val k = other.knownSize val r = if(replaced < 0) 0 else replaced @@ -1243,7 +1244,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a pair of Arrays, containing, respectively, the first and second half * of each element pair of this Array. */ - def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) var i = 0 @@ -1272,7 +1273,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a triple of Arrays, containing, respectively, the first, second, and third * elements from each element triple of this Array. */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { val a1 = new Array[A1](xs.length) val a2 = new Array[A2](xs.length) @@ -1294,7 +1295,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param asArray A function that converts elements of this array to rows - arrays of type `B`. * @return An array obtained by replacing elements of this arrays with rows the represent. 
*/ - def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = { val aClass = xs.getClass.getComponentType val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() @@ -1345,7 +1346,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam B the type of the elements after being transformed by `f` * @return a new array consisting of all the elements of this array without duplicates. */ - def distinctBy[B](f: A => B): Array[A] = + def distinctBy[B](f: A -> B): Array[A] = ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() /** A copy of this array with an element value appended until a given target length is reached. @@ -1357,7 +1358,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * all elements of this array followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = { var i = xs.length val newlen = max(i, len) val dest = Array.copyAs[B](xs, newlen) @@ -1417,7 +1418,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @tparam K the type of keys returned by the discriminator function * @tparam B the type of values returned by the transformation function */ - def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { val m = mutable.Map.empty[K, ArrayBuilder[B]] val len = xs.length var i = 0 @@ -1444,7 +1445,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param xs the array to fill. * @tparam B the type of the elements of the array. 
*/ - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start`. @@ -1455,7 +1456,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param start the starting index within the destination array. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements of this array to another array. * Fills the given array `xs` starting at index `start` with at most `len` values. @@ -1467,7 +1468,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param len the maximal number of elements to copy. * @tparam B the type of the elements of the array. */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) if (copied > 0) { Array.copy(this.xs, 0, xs, start, copied) @@ -1476,7 +1477,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { } /** Create a copy of this array with the specified element type. */ - def toArray[B >: A: ClassTag]: Array[B] = { + def toArray[sealed B >: A: ClassTag]: Array[B] = { val destination = new Array[B](xs.length) copyToArray(destination, 0) destination @@ -1495,7 +1496,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { // can't use a default arg because we already have another overload with a default arg /** Tests whether this array starts with the given array. 
*/ - @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0) /** Tests whether this array contains the given array at a given index. * @@ -1504,7 +1505,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return `true` if the array `that` is contained in this array at * index `offset`, otherwise `false`. */ - def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = { val safeOffset = offset.max(0) val thatl = that.length if(thatl > xs.length-safeOffset) thatl == 0 @@ -1523,7 +1524,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @param that the array to test * @return `true` if this array has `that` as a suffix, `false` otherwise. */ - def endsWith[B >: A](that: Array[B]): Boolean = { + def endsWith[sealed B >: A](that: Array[B]): Boolean = { val thatl = that.length val off = xs.length - thatl if(off < 0) false @@ -1543,7 +1544,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. 
*/ - def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = { if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") val dest = toArray[B] dest(index) = elem diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala index e8ca89806455..39c15dbe808f 100644 --- a/tests/pos-special/stdlib/collection/BitSet.scala +++ b/tests/pos-special/stdlib/collection/BitSet.scala @@ -18,7 +18,7 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import scala.annotation.nowarn import scala.collection.Stepper.EfficientSplit import scala.collection.mutable.Builder - +import language.experimental.captureChecking /** Base type of bitsets. * @@ -33,7 +33,7 @@ import scala.collection.mutable.Builder * @define Coll `BitSet` */ trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] { - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") @@ -48,7 +48,7 @@ object BitSet extends SpecificIterableFactory[Int, BitSet] { def empty: BitSet = immutable.BitSet.empty def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder - def fromSpecific(it: IterableOnce[Int]): BitSet = immutable.BitSet.fromSpecific(it) + def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it) @SerialVersionUID(3L) private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable { diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala 
b/tests/pos-special/stdlib/collection/BufferedIterator.scala index bc35ee0a25da..cca40dd31d40 100644 --- a/tests/pos-special/stdlib/collection/BufferedIterator.scala +++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala @@ -11,7 +11,7 @@ */ package scala.collection - +import language.experimental.captureChecking /** Buffered iterators are iterators which provide a method `head` * that inspects the next element without discarding it. diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala index bc9c49d9493c..0a3cc199d4dc 100644 --- a/tests/pos-special/stdlib/collection/BuildFrom.scala +++ b/tests/pos-special/stdlib/collection/BuildFrom.scala @@ -16,6 +16,8 @@ import scala.annotation.implicitNotFound import scala.collection.mutable.Builder import scala.collection.immutable.WrappedString import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available. * Implicit instances of `BuildFrom` are available for all collection types. @@ -26,7 +28,11 @@ import scala.reflect.ClassTag */ @implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.") trait BuildFrom[-From, -A, +C] extends Any { self => - def fromSpecific(from: From)(it: IterableOnce[A]): C + def fromSpecific(from: From)(it: IterableOnce[A]^): C + // !!! this is wrong, we need two versions of fromSpecific; one mapping + // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set. + // But that requires a large scale refactoring of BuildFrom. The unsafeAssumePure + // calls in this file are needed to sweep that problem under the carpet. /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. 
* Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ @@ -37,7 +43,7 @@ trait BuildFrom[-From, -A, +C] extends Any { self => /** Partially apply a BuildFrom to a Factory */ def toFactory(from: From): Factory[A, C] = new Factory[A, C] { - def fromSpecific(it: IterableOnce[A]): C = self.fromSpecific(from)(it) + def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it) def newBuilder: Builder[A, C] = self.newBuilder(from) } } @@ -48,42 +54,42 @@ object BuildFrom extends BuildFromLowPriority1 { implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it) } /** Build the source collection type from a SortedMapOps */ implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] { def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V] - def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) + def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it) } implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] = new 
BuildFrom[C, Int, C] { - def fromSpecific(from: C)(it: IterableOnce[Int]): C = from.bitSetFactory.fromSpecific(it) + def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it) def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder } implicit val buildFromString: BuildFrom[String, Char, String] = new BuildFrom[String, Char, String] { - def fromSpecific(from: String)(it: IterableOnce[Char]): String = Factory.stringFactory.fromSpecific(it) + def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it) def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder } implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] = new BuildFrom[WrappedString, Char, WrappedString] { - def fromSpecific(from: WrappedString)(it: IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(it) + def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it) def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder } - implicit def buildFromArray[A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = + implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] = new BuildFrom[Array[_], A, Array[A]] { - def fromSpecific(from: Array[_])(it: IterableOnce[A]): Array[A] = Factory.arrayFactory[A].fromSpecific(it) + def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it) def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder } - implicit def buildFromView[A, B]: BuildFrom[View[A], B, View[B]] = + implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] = new BuildFrom[View[A], B, View[B]] { - def fromSpecific(from: View[A])(it: IterableOnce[B]): View[B] = View.from(it) + def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = 
View.from(it).unsafeAssumePure def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder } @@ -97,12 +103,12 @@ trait BuildFromLowPriority1 extends BuildFromLowPriority2 { // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209 implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] { def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it) } implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] = new BuildFrom[String, A, immutable.IndexedSeq[A]] { - def fromSpecific(from: String)(it: IterableOnce[A]): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) + def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it) def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A] } } @@ -112,11 +118,11 @@ trait BuildFromLowPriority2 { implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] { //TODO: Reuse a prototype instance def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A] - def fromSpecific(from: CC[A0])(it: IterableOnce[A]): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it) + def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure } implicit def 
buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] { def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder - def fromSpecific(from: Iterator[_])(it: IterableOnce[A]): Iterator[A] = Iterator.from(it) + def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure } } diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala index cbc61d8c0268..baa9eceadae5 100644 --- a/tests/pos-special/stdlib/collection/DefaultMap.scala +++ b/tests/pos-special/stdlib/collection/DefaultMap.scala @@ -12,7 +12,7 @@ package scala package collection - +import language.experimental.captureChecking /** A default map which builds a default `immutable.Map` implementation for all * transformations. diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala index 2b15f1cc15d1..c45776b62b9c 100644 --- a/tests/pos-special/stdlib/collection/Factory.scala +++ b/tests/pos-special/stdlib/collection/Factory.scala @@ -18,6 +18,8 @@ import scala.language.implicitConversions import scala.collection.mutable.Builder import scala.annotation.unchecked.uncheckedVariance import scala.reflect.ClassTag +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** * A factory that builds a collection of type `C` with elements of type `A`. @@ -29,14 +31,14 @@ import scala.reflect.ClassTag * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) */ -trait Factory[-A, +C] extends Any { +trait Factory[-A, +C] extends Pure { /** * @return A collection of type `C` containing the same elements * as the source collection `it`. 
* @param it Source collection */ - def fromSpecific(it: IterableOnce[A]): C + def fromSpecific(it: IterableOnce[A]^): C /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */ @@ -48,7 +50,7 @@ object Factory { implicit val stringFactory: Factory[Char, String] = new StringFactory @SerialVersionUID(3L) private class StringFactory extends Factory[Char, String] with Serializable { - def fromSpecific(it: IterableOnce[Char]): String = { + def fromSpecific(it: IterableOnce[Char]^): String = { val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) b ++= it b.result() @@ -56,10 +58,10 @@ object Factory { def newBuilder: Builder[Char, String] = new mutable.StringBuilder() } - implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] @SerialVersionUID(3L) - private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): Array[A] = { + private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): Array[A] = { val b = newBuilder b.sizeHint(scala.math.max(0, it.knownSize)) b ++= it @@ -80,7 +82,7 @@ object Factory { * @define coll collection * @define Coll `Iterable` */ -trait IterableFactory[+CC[_]] extends Serializable { +trait IterableFactory[+CC[_]] extends Serializable, Pure { /** Creates a target $coll from an existing source collection * @@ -88,7 +90,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - def from[A](source: IterableOnce[A]): CC[A] + def from[A](source: IterableOnce[A]^): CC[A]^{source} /** An empty collection * @tparam A the 
type of the ${coll}'s elements @@ -109,7 +111,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param f the function that's repeatedly applied * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` */ - def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f)) /** Produces a $coll that uses a function `f` to produce elements of type `A` * and update an internal state of type `S`. @@ -121,7 +123,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @tparam S Type of the internal state * @return a $coll that produces elements using `f` until `f` returns `None` */ - def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f)) /** Produces a $coll containing a sequence of increasing of integers. * @@ -150,7 +152,7 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n` evaluations of `elem`. */ - def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem)) /** Produces a two-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -158,7 +160,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? 
// fill(n1)(fill(n2)(elem)) /** Produces a three-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -167,7 +170,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure /** Produces a four-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -177,8 +181,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. */ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4)(elem)) /** Produces a five-dimensional $coll containing the results of some element computation a number of times. * @param n1 the number of elements in the 1st dimension @@ -189,15 +193,15 @@ trait IterableFactory[+CC[_]] extends Serializable { * @param elem the element computation * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
*/ - def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - fill(n1)(fill(n2, n3, n4, n5)(elem)) + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc + ??? // fill(n1)(fill(n2, n3, n4, n5)(elem)) /** Produces a $coll containing values of a given function over a range of integer values starting from 0. * @param n The number of elements in the $coll * @param f The function computing element values * @return A $coll consisting of elements `f(0), ..., f(n -1)` */ - def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f)) /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -206,8 +210,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2)` * for `0 <= i1 < n1` and `0 <= i2 < n2`. */ - def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -217,8 +221,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. 
*/ - def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -229,8 +233,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. * @param n1 the number of elements in the 1st dimension @@ -242,8 +246,8 @@ trait IterableFactory[+CC[_]] extends Serializable { * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. */ - def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = - tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc + ??? 
// tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) /** Concatenates all argument collections into a single $coll. * @@ -271,13 +275,15 @@ object IterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure + // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = new BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = + factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary def newBuilder(from: Any) = factory.newBuilder } @@ -285,15 +291,20 @@ object IterableFactory { class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } } +// !!! Needed to add this separate trait +trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]: + def from[A](source: IterableOnce[A]^): CC[A] + override def apply[A](elems: A*): CC[A] = from(elems) + /** * @tparam CC Collection type constructor (e.g. 
`List`) */ -trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] { import SeqFactory.UnapplySeqWrapper final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? } @@ -303,7 +314,7 @@ object SeqFactory { class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) def empty[A]: CC[A] = delegate.empty - def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] } @@ -366,6 +377,8 @@ trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFac * @define Coll `Iterable` */ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + this: SpecificIterableFactory[A, C] => + def empty: C def apply(xs: A*): C = fromSpecific(xs) def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) @@ -381,7 +394,7 @@ trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { * @define coll collection * @define Coll `Iterable` */ -trait MapFactory[+CC[_, _]] extends Serializable { +trait MapFactory[+CC[_, _]] extends Serializable, Pure { /** * An empty Map @@ -391,7 +404,7 @@ trait MapFactory[+CC[_, _]] extends Serializable { /** * A collection of type Map generated from given iterable object. */ - def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V] /** * A collection of type Map that contains given key/value bindings. 
@@ -424,20 +437,20 @@ object MapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) def newBuilder(from: Any) = factory.newBuilder[K, V] } @SerialVersionUID(3L) class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) - def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it) def empty[K, V]: C[K, V] = delegate.empty def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder } @@ -454,9 +467,9 @@ object MapFactory { * @define coll collection * @define Coll `Iterable` */ -trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure { - def from[E : Ev](it: IterableOnce[E]): CC[E] + def from[E : Ev](it: IterableOnce[E]^): CC[E] def empty[A : Ev]: CC[A] @@ -517,13 +530,13 @@ object EvidenceIterableFactory { @SerialVersionUID(3L) private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it) def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] } implicit def 
toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { - def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it) def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] } @@ -531,7 +544,7 @@ object EvidenceIterableFactory { class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) def empty[A : Ev]: CC[A] = delegate.empty - def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it) def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] } } @@ -668,7 +681,7 @@ object ClassTagIterableFactory { @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) @@ -734,10 +747,11 @@ trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extend * @define Coll `Iterable` */ trait SortedMapFactory[+CC[_, _]] extends Serializable 
{ + this: SortedMapFactory[CC] => def empty[K : Ordering, V]: CC[K, V] - def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) @@ -764,20 +778,20 @@ object SortedMapFactory { @SerialVersionUID(3L) private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it) def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] } implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { - def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it) def newBuilder(from: Any) = factory.newBuilder[K, V] } @SerialVersionUID(3L) class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) - def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it) def empty[K : Ordering, V]: CC[K, V] = delegate.empty def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder } diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala index 4e1fd872b8b5..772dcf5c65da 100644 --- a/tests/pos-special/stdlib/collection/Hashing.scala +++ b/tests/pos-special/stdlib/collection/Hashing.scala @@ 
-12,6 +12,7 @@ package scala package collection +import language.experimental.captureChecking protected[collection] object Hashing { diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala index a82d5384779a..a2d4cc942231 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala @@ -17,6 +17,9 @@ import scala.annotation.{nowarn, tailrec} import scala.collection.Searching.{Found, InsertionPoint, SearchResult} import scala.collection.Stepper.EfficientSplit import scala.math.Ordering +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure + /** Base trait for indexed sequences that have efficient `apply` and `length` */ trait IndexedSeq[+A] extends Seq[A] @@ -32,7 +35,7 @@ trait IndexedSeq[+A] extends Seq[A] object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq) /** Base trait for indexed Seq operations */ -trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => +trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self => def iterator: Iterator[A] = view.iterator @@ -85,7 +88,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n)) - override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)) + override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this)) @@ -103,7 +106,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => override def knownSize: Int = length - override final def lengthCompare(that: Iterable[_]): Int = { + override final def lengthCompare(that: Iterable[_]^): Int = { val res = 
that.sizeCompare(length) // can't just invert the result, because `-Int.MinValue == Int.MinValue` if (res == Int.MinValue) 1 else -res diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala index 737f032d2060..a16e06fa707d 100644 --- a/tests/pos-special/stdlib/collection/IndexedSeqView.scala +++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala @@ -14,33 +14,38 @@ package scala package collection import scala.annotation.nowarn +import language.experimental.captureChecking +trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { + self: IndexedSeqViewOps[A, CC, C]^ => +} /** View defined in terms of indexing a range */ -trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] { self => +trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] { + self: IndexedSeqView[A]^ => - override def view: IndexedSeqView[A] = this + override def view: IndexedSeqView[A]^{this} = this @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - override def view(from: Int, until: Int): IndexedSeqView[A] = view.slice(from, until) - - override def iterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewIterator(this) - override def reverseIterator: Iterator[A] = new IndexedSeqView.IndexedSeqViewReverseIterator(this) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new IndexedSeqView.Prepended(elem, this) - override def take(n: Int): IndexedSeqView[A] = new IndexedSeqView.Take(this, n) - override def takeRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.TakeRight(this, n) - override def drop(n: Int): IndexedSeqView[A] = new IndexedSeqView.Drop(this, n) - override def dropRight(n: Int): IndexedSeqView[A] = new IndexedSeqView.DropRight(this, n) - override def map[B](f: A => B): 
IndexedSeqView[B] = new IndexedSeqView.Map(this, f) - override def reverse: IndexedSeqView[A] = new IndexedSeqView.Reverse(this) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new IndexedSeqView.Slice(this, from, until) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new IndexedSeqView.Map(this, { (a: A) => f(a); a}) - - def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new IndexedSeqView.Concat(prefix, this) + override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until) + + override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this) + override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this) + override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n) + override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f) + override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} 
= new IndexedSeqView.Map(this, { (a: A) => f(a); a}) + + def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix: String = "IndexedSeqView" @@ -49,7 +54,8 @@ trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] object IndexedSeqView { @SerialVersionUID(3L) - private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewIterator[A]^ => private[this] var current = 0 private[this] var remainder = self.length override def knownSize: Int = remainder @@ -63,7 +69,7 @@ object IndexedSeqView { r } else Iterator.empty.next() - override def drop(n: Int): Iterator[A] = { + override def drop(n: Int): Iterator[A]^{this} = { if (n > 0) { current += n remainder = Math.max(0, remainder - n) @@ -71,7 +77,7 @@ object IndexedSeqView { this } - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value @@ -83,7 +89,8 @@ object IndexedSeqView { } } @SerialVersionUID(3L) - private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewReverseIterator[A](self: 
IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable { + this: IndexedSeqViewReverseIterator[A]^ => private[this] var remainder = self.length private[this] var pos = remainder - 1 @inline private[this] def _hasNext: Boolean = remainder > 0 @@ -98,7 +105,7 @@ object IndexedSeqView { // from < 0 means don't move pos, until < 0 means don't limit remainder // - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { if (_hasNext) { if (remainder <= from) remainder = 0 // exhausted by big skip else if (from <= 0) { // no skip, pos is same @@ -117,47 +124,47 @@ object IndexedSeqView { } } - /** An `IndexedSeqOps` whose collection type and collection type constructor are unknown */ - type SomeIndexedSeqOps[A] = IndexedSeqOps[A, AnyConstr, _] + /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */ + type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _] @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A]) + class Id[+A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Id(underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A) extends SeqView.Appended(underlying, elem) with IndexedSeqView[A] @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^) extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A] @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A]) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^) extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A] @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A], n: Int) + class Take[A](underlying: 
SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Take(underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int) + class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int) extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A] @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B) extends SeqView.Map(underlying, f) with IndexedSeqView[B] @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A]) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { + class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] { override def reverse: IndexedSeqView[A] = underlying match { case x: IndexedSeqView[A] => x case _ => super.reverse @@ -165,7 +172,7 @@ object IndexedSeqView { } @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int) extends AbstractIndexedSeqView[A] { + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] { protected val lo = from max 0 protected val hi = (until max 0) min underlying.length protected val len = (hi - lo) max 0 diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala index 04647f215963..bca80d7be108 100644 --- a/tests/pos-special/stdlib/collection/Iterable.scala +++ b/tests/pos-special/stdlib/collection/Iterable.scala @@ -17,6 +17,7 @@ 
import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} +import language.experimental.captureChecking /** Base trait for generic collections. * @@ -28,6 +29,7 @@ import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} trait Iterable[+A] extends IterableOnce[A] with IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => // The collection itself @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") @@ -94,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A] * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) + def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that) } /** Base trait for Iterable operations @@ -132,29 +134,31 @@ trait Iterable[+A] extends IterableOnce[A] * and may be nondeterministic. */ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + this: IterableOps[A, CC, C]^ => + /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ // Should be `protected def asIterable`, or maybe removed altogether if it's not needed @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") - def toIterable: Iterable[A] + def toIterable: Iterable[A]^{this} /** Converts this $coll to an unspecified Iterable. 
Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. */ @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") - final def toTraversable: Traversable[A] = toIterable + final def toTraversable: Traversable[A]^{this} = toIterable override def isTraversableAgain: Boolean = true /** * @return This collection as a `C`. */ - protected def coll: C + protected def coll: C^{this} @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") - final def repr: C = coll + final def repr: C^{this} = coll /** * Defines how to turn a given `Iterable[A]` into a collection of type `C`. @@ -174,7 +178,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * `Iterable[A]` obtained from `this` collection (as it is the case in the * implementations of operations where we use a `View[A]`), it is safe. */ - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): C^{coll} /** The companion object of this ${coll}, providing various factory methods. * @@ -251,7 +255,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable def lastOption: Option[A] = if (isEmpty) None else Some(last) /** A view over the elements of this collection. */ - def view: View[A] = View.fromIteratorProvider(() => iterator) + def view: View[A]^{this} = View.fromIteratorProvider(() => iterator) /** Compares the size of this $coll to a test value. 
* @@ -301,7 +305,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * this.sizeIs > size // this.sizeCompare(size) > 0 * }}} */ - @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + @inline final def sizeIs: IterableOps.SizeCompareOps^{this} = new IterableOps.SizeCompareOps(this) /** Compares the size of this $coll to the size of another `Iterable`. * @@ -317,7 +321,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def sizeCompare(that: Iterable[_]): Int = { + def sizeCompare(that: Iterable[_]^): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this sizeCompare thatKnownSize @@ -342,7 +346,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** A view over a slice of the elements of this collection. */ @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") - def view(from: Int, until: Int): View[A] = view.slice(from, until) + def view(from: Int, until: Int): View[A]^{this} = view.slice(from, until) /** Transposes this $coll of iterable collections into * a $coll of ${coll}s. @@ -378,7 +382,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @throws IllegalArgumentException if all collections in this $coll * are not of the same size. 
*/ - def transpose[B](implicit asIterable: A => /*<: /*<: Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false)) - def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + def filterNot(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = true)) /** Creates a non-strict filter of this $coll. * @@ -417,7 +421,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * All these operations apply to those elements of this $coll * which satisfy the predicate `p`. */ - def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) + def withFilter(p: A => Boolean): collection.WithFilter[A, CC]^{this, p} = new IterableOps.WithFilter(this, p) /** A pair of, first, all elements that satisfy predicate `p` and, second, * all elements that do not. Interesting because it splits a collection in two. @@ -426,15 +430,15 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, * which requires only a single traversal. */ - def partition(p: A => Boolean): (C, C) = { + def partition(p: A => Boolean): (C^{this, p}, C^{this, p}) = { val first = new View.Filter(this, p, false) val second = new View.Filter(this, p, true) (fromSpecific(first), fromSpecific(second)) } - override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + override def splitAt(n: Int): (C^{this}, C^{this}) = (take(n), drop(n)) - def take(n: Int): C = fromSpecific(new View.Take(this, n)) + def take(n: Int): C^{this} = fromSpecific(new View.Take(this, n)) /** Selects the last ''n'' elements. 
* $orderDependent @@ -443,7 +447,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + def takeRight(n: Int): C^{this} = fromSpecific(new View.TakeRight(this, n)) /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -451,11 +455,11 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + def takeWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.TakeWhile(this, p)) - def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) = (takeWhile(p), dropWhile(p)) - def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + def drop(n: Int): C^{this} = fromSpecific(new View.Drop(this, n)) /** Selects all elements except last ''n'' ones. * $orderDependent @@ -464,9 +468,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + def dropRight(n: Int): C^{this} = fromSpecific(new View.DropRight(this, n)) - def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + def dropWhile(p: A => Boolean): C^{this, p} = fromSpecific(new View.DropWhile(this, p)) /** Partitions elements in fixed size ${coll}s. 
* @see [[scala.collection.Iterator]], method `grouped` @@ -475,7 +479,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return An iterator producing ${coll}s of size `size`, except the * last will be less than size `size` if the elements don't divide evenly. */ - def grouped(size: Int): Iterator[C] = + def grouped(size: Int): Iterator[C^{this}]^{this} = iterator.grouped(size).map(fromSpecific) /** Groups elements in fixed size blocks by passing a "sliding window" @@ -497,7 +501,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` */ - def sliding(size: Int): Iterator[C] = sliding(size, 1) + def sliding(size: Int): Iterator[C^{this}]^{this} = sliding(size, 1) /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -516,13 +520,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * element (which may be the only element) will be smaller * if there are fewer than `size` elements remaining to be grouped. * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` - * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` */ - def sliding(size: Int, step: Int): Iterator[C] = + def sliding(size: Int, step: Int): Iterator[C^{this}]^{this} = iterator.sliding(size, step).map(fromSpecific) /** The rest of the collection without its first element. 
*/ - def tail: C = { + def tail: C^{this} = { if (isEmpty) throw new UnsupportedOperationException drop(1) } @@ -530,12 +534,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** The initial part of the collection without its last element. * $willForceEvaluation */ - def init: C = { + def init: C^{this} = { if (isEmpty) throw new UnsupportedOperationException dropRight(1) } - def slice(from: Int, until: Int): C = + def slice(from: Int, until: Int): C^{this} = fromSpecific(new View.Drop(new View.Take(this, until), from)) /** Partitions this $coll into a map of ${coll}s according to some discriminator function. @@ -645,9 +649,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * * @return a new $coll containing the prefix scan of the elements in this $coll */ - def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + def scan[B >: A](z: B)(op: (B, B) => B): CC[B]^{this, op} = scanLeft(z)(op) - def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} = iterableFactory.from(new View.ScanLeft(this, z, op)) /** Produces a collection containing cumulative results of applying the operator going right to left. * The head of the collection is the last cumulative result. 
@@ -665,7 +669,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + def scanRight[B](z: B)(op: (A, B) => B): CC[B]^{this, op} = { class Scanner extends runtime.AbstractFunction1[A, Unit] { var acc = z var scanned = acc :: immutable.Nil @@ -679,13 +683,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable iterableFactory.from(scanner.scanned) } - def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + def map[B](f: A => B): CC[B]^{this, f} = iterableFactory.from(new View.Map(this, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = iterableFactory.from(new View.FlatMap(this, f)) - def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} = flatMap(asIterable) - def collect[B](pf: PartialFunction[A, B]): CC[B] = + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} = iterableFactory.from(new View.Collect(this, pf)) /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one @@ -706,12 +710,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @tparam A2 the element type of the second resulting collection * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] * - * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in 
[[scala.util.Right]]. */ - def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { - val left: View[A1] = new LeftPartitionMapped(this, f) - val right: View[A2] = new RightPartitionMapped(this, f) + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1]^{this, f}, CC[A2]^{this, f}) = { + val left: View[A1]^{f, this} = new LeftPartitionMapped(this, f) + val right: View[A2]^{f, this} = new RightPartitionMapped(this, f) (iterableFactory.from(left), iterableFactory.from(right)) } @@ -724,13 +728,13 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. */ - def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { + def concat[B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = iterableFactory.from(suffix match { case xs: Iterable[B] => new View.Concat(this, xs) case xs => iterator ++ suffix.iterator }) /** Alias for `concat` */ - @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]^): CC[B]^{this, suffix} = concat(suffix) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -741,12 +745,12 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. * The length of the returned collection is the minimum of the lengths of this $coll and `that`. 
*/ - def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)]^{this, that} = iterableFactory.from(that match { // sound bcs of VarianceNote case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) - def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} = iterableFactory.from(new View.ZipWithIndex(this)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -762,7 +766,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. */ - def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + def zipAll[A1 >: A, B](that: Iterable[B]^, thisElem: A1, thatElem: B): CC[(A1, B)]^{this, that} = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) /** Converts this $coll of pairs into two collections of the first and second * half of each pair. @@ -783,9 +787,9 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a pair of ${coll}s, containing the first, respectively second * half of each element pair of this $coll. 
*/ - def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { - val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) - val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1]^{this}, CC[A2]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asPair(_)._2) (iterableFactory.from(first), iterableFactory.from(second)) } @@ -810,10 +814,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return a triple of ${coll}s, containing the first, second, respectively * third member of each element triple of this $coll. */ - def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { - val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) - val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) - val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1]^{this}, CC[A2]^{this}, CC[A3]^{this}) = { + val first: View[A1]^{this} = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2]^{this} = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3]^{this} = new View.Map[A, A3](this, asTriple(_)._3) (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) } @@ -824,7 +828,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the tails of this $coll * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` */ - def tails: Iterator[C] = iterateUntilEmpty(_.tail) + def tails: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.tail) /** Iterates over the inits of this $coll. 
The first value will be this * $coll and the final one will be an empty $coll, with the intervening @@ -835,21 +839,24 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable * @return an iterator over all the inits of this $coll * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` */ - def inits: Iterator[C] = iterateUntilEmpty(_.init) + def inits: Iterator[C^{this}]^{this} = iterateUntilEmpty(_.init) - override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + override def tapEach[U](f: A => U): C^{this, f} = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + private[this] def iterateUntilEmpty(f: Iterable[A]^{this} => Iterable[A]^{this}): Iterator[C^{this}]^{this, f} = { // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` // `this.tail.tail` doesn't compile as `C` is unbounded // `Iterable.from(this)` would eagerly copy non-immutable collections - val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f) + .takeWhile((itble: Iterable[A]^) => itble.iterator.nonEmpty) + // CC TODO type annotation for itble needed. + // The previous code `.takeWhile(_.iterator.nonEmpty)` does not work. 
(it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + def ++:[B >: A](that: IterableOnce[B]^): CC[B]^{this, that} = iterableFactory.from(that match { case xs: Iterable[B] => new View.Concat(xs, this) case _ => that.iterator ++ iterator }) @@ -862,7 +869,8 @@ object IterableOps { * These operations are implemented in terms of * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. */ - final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]^) extends AnyVal { + this: SizeCompareOps^{it} => /** Tests if the size of the collection is less than some value. */ @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 /** Tests if the size of the collection is less than or equal to some value. 
*/ @@ -887,22 +895,22 @@ object IterableOps { */ @SerialVersionUID(3L) class WithFilter[+A, +CC[_]]( - self: IterableOps[A, CC, _], + self: IterableOps[A, CC, _]^, p: A => Boolean ) extends collection.WithFilter[A, CC] with Serializable { - protected def filtered: Iterable[A] = + protected def filtered: Iterable[A]^{this} = new View.Filter(self, p, isFlipped = false) - def map[B](f: A => B): CC[B] = + def map[B](f: A => B): CC[B]^{this, f} = self.iterableFactory.from(new View.Map(filtered, f)) - def flatMap[B](f: A => IterableOnce[B]): CC[B] = + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} = self.iterableFactory.from(new View.FlatMap(filtered, f)) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): WithFilter[A, CC] = + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} = new WithFilter(self, (a: A) => p(a) && q(a)) } @@ -940,7 +948,7 @@ abstract class AbstractIterable[+A] extends Iterable[A] * same as `C`. */ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { - protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = iterableFactory.from(coll) protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] // overridden for efficiency, since we know CC[A] =:= C @@ -958,7 +966,7 @@ trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends I trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] implicit protected def iterableEvidence: Ev[A @uncheckedVariance] - override protected def fromSpecific(coll: IterableOnce[A 
@uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = evidenceIterableFactory.from(coll) override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty } @@ -980,11 +988,11 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) - override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) } @@ -1004,7 +1012,8 @@ trait SortedSetFactoryDefaults[+A, trait MapFactoryDefaults[K, +V, +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + 
this: MapFactoryDefaults[K, V, CC, WithFilterCC] => + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = mapFactory.from(coll) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) @@ -1012,7 +1021,7 @@ trait MapFactoryDefaults[K, +V, case _ => mapFactory.empty } - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC]^{p} = new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) } @@ -1035,9 +1044,9 @@ trait SortedMapFactoryDefaults[K, +V, self: IterableOps[(K, V), WithFilterCC, _] => override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) - override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) } diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala 
b/tests/pos-special/stdlib/collection/IterableOnce.scala index 65d8dce08ae4..a88be4943c58 100644 --- a/tests/pos-special/stdlib/collection/IterableOnce.scala +++ b/tests/pos-special/stdlib/collection/IterableOnce.scala @@ -14,12 +14,13 @@ package scala package collection import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.collection.mutable.StringBuilder import scala.language.implicitConversions import scala.math.{Numeric, Ordering} import scala.reflect.ClassTag import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking /** * A template trait for collections which can be traversed either once only @@ -42,8 +43,10 @@ import scala.runtime.AbstractFunction2 * @define coll collection */ trait IterableOnce[+A] extends Any { + this: IterableOnce[A]^ => + /** Iterator can be used only once */ - def iterator: Iterator[A] + def iterator: Iterator[A]^{this} /** Returns a [[scala.collection.Stepper]] for the elements of this collection. * @@ -65,9 +68,9 @@ trait IterableOnce[+A] extends Any { * allow creating parallel streams, whereas bare Steppers can be converted only to sequential * streams. */ - def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { + def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = { import convert.impl._ - val s = shape.shape match { + val s: Any = shape.shape match { case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) @@ -84,7 +87,7 @@ trait IterableOnce[+A] extends Any { final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { @deprecated("Use .iterator.withFilter(...) 
instead", "2.13.0") - def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) + def withFilter(f: A => Boolean): Iterator[A]^{f} = it.iterator.withFilter(f) @deprecated("Use .iterator.reduceLeftOption(...) instead", "2.13.0") def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) @@ -102,7 +105,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") - def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + def maxBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) @@ -120,7 +123,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) @deprecated("Use .iterator.minBy(...) instead", "2.13.0") - def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + def minBy[B](f: A -> B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) @deprecated("Use .iterator.size instead", "2.13.0") def size: Int = it.iterator.size @@ -132,7 +135,7 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) @deprecated("Use .iterator.filter(...) instead", "2.13.0") - def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) + def filter(f: A => Boolean): Iterator[A]^{f} = it.iterator.filter(f) @deprecated("Use .iterator.exists(...) 
instead", "2.13.0") def exists(f: A => Boolean): Boolean = it.iterator.exists(f) @@ -159,10 +162,10 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") - def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) @deprecated("Use .iterator.toArray", "2.13.0") - def toArray[B >: A: ClassTag]: Array[B] = it match { + def toArray[sealed B >: A: ClassTag]: Array[B] = it match { case it: Iterable[B] => it.toArray[B] case _ => it.iterator.toArray[B] } @@ -238,13 +241,13 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") - def map[B](f: A => B): IterableOnce[B] = it match { + def map[B](f: A => B): IterableOnce[B]^{f} = it match { case it: Iterable[A] => it.map(f) case _ => it.iterator.map(f) } @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") - def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { + def flatMap[B](f: A => IterableOnce[B]^): IterableOnce[B]^{f} = it match { case it: Iterable[A] => it.flatMap(f) case _ => it.iterator.flatMap(f) } @@ -269,10 +272,11 @@ object IterableOnce { math.max(math.min(math.min(len, srcLen), destLen - start), 0) /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. 
*/ - @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], - xs: Array[B], - start: Int = 0, - len: Int = Int.MaxValue): Int = + @inline private[collection] def copyElemsToArray[A, sealed B >: A]( + elems: IterableOnce[A]^, + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = elems match { case src: Iterable[A] => src.copyToArray[B](xs, start, len) case src => src.iterator.copyToArray[B](xs, start, len) @@ -315,9 +319,11 @@ object IterableOnce { * @define coll collection * */ -trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ => /////////////////////////////////////////////////////////////// Abstract methods that must be implemented + import IterableOnceOps.Maximized + /** Produces a $coll containing cumulative results of applying the * operator going left to right, including the initial value. * @@ -329,7 +335,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @param op the binary operator applied to the intermediate result and the element * @return collection with intermediate results */ - def scanLeft[B](z: B)(op: (B, A) => B): CC[B] + def scanLeft[B](z: B)(op: (B, A) => B): CC[B]^{this, op} /** Selects all elements of this $coll which satisfy a predicate. * @@ -337,7 +343,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. */ - def filter(p: A => Boolean): C + def filter(p: A => Boolean): C^{this, p} /** Selects all elements of this $coll which do not satisfy a predicate. * @@ -345,7 +351,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll consisting of all elements of this $coll that do not satisfy the given * predicate `pred`. Their order may not be preserved. 
*/ - def filterNot(pred: A => Boolean): C + def filterNot(p: A => Boolean): C^{this, p} /** Selects the first ''n'' elements. * $orderDependent @@ -354,7 +360,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * or else the whole $coll, if it has less than `n` elements. * If `n` is negative, returns an empty $coll. */ - def take(n: Int): C + def take(n: Int): C^{this} /** Takes longest prefix of elements that satisfy a predicate. * $orderDependent @@ -362,7 +368,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the longest prefix of this $coll whose elements all satisfy * the predicate `p`. */ - def takeWhile(p: A => Boolean): C + def takeWhile(p: A => Boolean): C^{this, p} /** Selects all elements except first ''n'' ones. * $orderDependent @@ -371,7 +377,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * empty $coll, if this $coll has less than `n` elements. * If `n` is negative, don't drop any elements. */ - def drop(n: Int): C + def drop(n: Int): C^{this} /** Drops longest prefix of elements that satisfy a predicate. * $orderDependent @@ -379,7 +385,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the longest suffix of this $coll whose first element * does not satisfy the predicate `p`. */ - def dropWhile(p: A => Boolean): C + def dropWhile(p: A => Boolean): C^{this, p} /** Selects an interval of elements. The returned $coll is made up * of all elements `x` which satisfy the invariant: @@ -394,7 +400,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * index `from` extending up to (but not including) index `until` * of this $coll. */ - def slice(from: Int, until: Int): C + def slice(from: Int, until: Int): C^{this} /** Builds a new $coll by applying a function to all elements of this $coll. 
* @@ -403,7 +409,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[B](f: A => B): CC[B] + def map[B](f: A => B): CC[B]^{this, f} /** Builds a new $coll by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -436,7 +442,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]): CC[B] + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} /** Converts this $coll of iterable collections into * a $coll formed by the elements of these iterable @@ -464,7 +470,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * type of this $coll is an `Iterable`. * @return a new $coll resulting from concatenating all element ${coll}s. */ - def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] + def flatten[B](implicit asIterable: A -> IterableOnce[B]): CC[B]^{this} /** Builds a new $coll by applying a partial function to all elements of this $coll * on which the function is defined. @@ -475,7 +481,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * `pf` to each element on which it is defined and collecting the results. * The order of the elements is preserved. */ - def collect[B](pf: PartialFunction[A, B]): CC[B] + def collect[B](pf: PartialFunction[A, B]^): CC[B]^{this, pf} /** Zips this $coll with its indices. 
* @@ -484,7 +490,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @example * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` */ - def zipWithIndex: CC[(A @uncheckedVariance, Int)] + def zipWithIndex: CC[(A @uncheckedVariance, Int)]^{this} /** Splits this $coll into a prefix/suffix pair according to a predicate. * @@ -497,7 +503,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a pair consisting of the longest prefix of this $coll whose * elements all satisfy `p`, and the rest of this $coll. */ - def span(p: A => Boolean): (C, C) + def span(p: A => Boolean): (C^{this, p}, C^{this, p}) /** Splits this $coll into a prefix/suffix pair at a given position. * @@ -509,7 +515,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return a pair of ${coll}s consisting of the first `n` * elements of this $coll, and the other elements. */ - def splitAt(n: Int): (C, C) = { + def splitAt(n: Int): (C^{this}, C^{this}) = { class Spanner extends runtime.AbstractFunction1[A, Boolean] { var i = 0 def apply(a: A) = i < n && { i += 1 ; true } @@ -527,7 +533,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @tparam U the return type of f * @return The same logical collection as this */ - def tapEach[U](f: A => U): C + def tapEach[U](f: A => U): C^{this, f} /////////////////////////////////////////////////////////////// Concrete methods based on iterator @@ -802,7 +808,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => case _ => Some(reduceLeft(op)) } private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) - private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X]^)(op: (B, X) => B): Option[B] = { if 
(it.hasNext) { var acc: B = it.next() while (it.hasNext) @@ -884,7 +890,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -901,7 +907,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @note Reuse: $consumesIterator */ @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") - def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) /** Copy elements to an array, returning the number of elements written. * @@ -918,7 +924,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * * @note Reuse: $consumesIterator */ - def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val it = iterator var i = start val end = start + math.min(len, xs.length - start) @@ -1041,35 +1047,12 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the first element of this $coll with the largest value measured by function f * with respect to the ordering `cmp`. 
*/ - def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = + def maxBy[B](f: A -> B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.maxBy") case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result } - private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { - var maxElem: X = null.asInstanceOf[X] - var maxF: B = null.asInstanceOf[B] - var nonEmpty = false - def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None - def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") - def apply(m: Maximized[X, B], a: X): Maximized[X, B] = - if (m.nonEmpty) { - val fa = f(a) - if (cmp(fa, maxF)) { - maxF = fa - maxElem = a - } - m - } - else { - m.nonEmpty = true - m.maxElem = a - m.maxF = f(a) - m - } - } - /** Finds the first element which yields the largest value measured by function f. * * $willNotTerminateInf @@ -1080,7 +1063,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return an option value containing the first element of this $coll with the * largest value measured by function f with respect to the ordering `cmp`. */ - def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + def maxByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption @@ -1097,7 +1080,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * @return the first element of this $coll with the smallest value measured by function f * with respect to the ordering `cmp`. 
*/ - def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = + def minBy[B](f: A -> B)(implicit ord: Ordering[B]): A = knownSize match { case 0 => throw new UnsupportedOperationException("empty.minBy") case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result @@ -1114,7 +1097,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => * with the smallest value measured by function f * with respect to the ordering `cmp`. */ - def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + def minByOption[B](f: A -> B)(implicit ord: Ordering[B]): Option[A] = knownSize match { case 0 => None case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption @@ -1310,7 +1293,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) @deprecated("Use .iterator instead of .toIterator", "2.13.0") - @`inline` final def toIterator: Iterator[A] = iterator + @`inline` final def toIterator: Iterator[A]^{this} = iterator def toList: immutable.List[A] = immutable.List.from(this) @@ -1330,13 +1313,13 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") @`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream) - @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) /** Convert collection to array. * * Implementation note: DO NOT call [[Array.from]] from this method. 
*/ - def toArray[B >: A: ClassTag]: Array[B] = + def toArray[sealed B >: A: ClassTag]: Array[B] = if (knownSize >= 0) { val destination = new Array[B](knownSize) copyToArray(destination, 0) @@ -1352,3 +1335,31 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => xs } } + +object IterableOnceOps: + + // Moved out of trait IterableOnceOps to here, since universal traits cannot + // have nested classes in Scala 3 + private class Maximized[X, B](descriptor: String)(f: X -> B)(cmp: (B, B) -> Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + var maxElem: X @uncheckedCaptures = null.asInstanceOf[X] + var maxF: B @uncheckedCaptures = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } +end IterableOnceOps \ No newline at end of file diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala index 4b8338ed1b17..90fd387069b0 100644 --- a/tests/pos-special/stdlib/collection/Iterator.scala +++ b/tests/pos-special/stdlib/collection/Iterator.scala @@ -14,8 +14,11 @@ package scala.collection import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ImmutableBuilder} import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures} import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures + /** Iterators are data structures that allow to iterate over a sequence * of elements. 
They have a `hasNext` method for checking @@ -71,7 +74,8 @@ import scala.runtime.Statics * iterators as well. * @define coll iterator */ -trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { self => +trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Iterator[A]] { + self: Iterator[A]^ => /** Check if there is a next element available. * @@ -93,7 +97,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @throws[NoSuchElementException] def next(): A - @inline final def iterator = this + @inline final def iterator: Iterator[A]^{this} = this /** Wraps the value of `next()` in an option. * @@ -117,7 +121,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return a buffered iterator producing the same values as this iterator. * @note Reuse: $consumesAndProducesIterator */ - def buffered: BufferedIterator[A] = new AbstractIterator[A] with BufferedIterator[A] { + def buffered: BufferedIterator[A]^{this} = new AbstractIterator[A] with BufferedIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -153,16 +157,16 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * A `GroupedIterator` is yielded by `grouped` and by `sliding`, * where the `step` may differ from the group `size`. 
*/ - class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + class GroupedIterator[B >: A](self: Iterator[B]^, size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: Array[B] = null // current result - private[this] var prev: Array[B] = null // if sliding, overlap from previous result + private[this] var buffer: Array[B @uncheckedCaptures] = null // current result + private[this] var prev: Array[B @uncheckedCaptures] = null // if sliding, overlap from previous result private[this] var first = true // if !first, advancing may skip ahead private[this] var filled = false // whether the buffer is "hot" private[this] var partial = true // whether to emit partial sequence - private[this] var padding: () => B = null // what to pad short sequences with + private[this] var padding: () -> B @uncheckedCaptures = null // what to pad short sequences with private[this] def pad = padding != null // irrespective of partial flag private[this] def newBuilder = { val b = ArrayBuilder.make[Any] @@ -185,7 +189,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial`. 
* @group Configuration */ - def withPadding(x: => B): this.type = { + def withPadding(x: -> B): this.type = { padding = () => x partial = true // redundant, as padding always results in complete segment this @@ -254,7 +258,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // segment must have data, and must be complete unless they allow partial val ok = index > 0 && (partial || index == size) - if (ok) buffer = builder.result().asInstanceOf[Array[B]] + if (ok) buffer = builder.result().asInstanceOf[Array[B @uncheckedCaptures]] else prev = null ok } @@ -291,7 +295,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * all elements of this $coll followed by the minimal number of occurrences of `elem` so * that the resulting collection has a length of at least `len`. */ - def padTo[B >: A](len: Int, elem: B): Iterator[B] = new AbstractIterator[B] { + def padTo[B >: A](len: Int, elem: B): Iterator[B]^{this} = new AbstractIterator[B] { private[this] var i = 0 override def knownSize: Int = { @@ -321,7 +325,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * is the same as in the original iterator. 
* @note Reuse: $consumesOneAndProducesTwoIterators */ - def partition(p: A => Boolean): (Iterator[A], Iterator[A]) = { + def partition(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { val (a, b) = duplicate (a filter p, b filterNot p) } @@ -341,7 +345,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def grouped[B >: A](size: Int): GroupedIterator[B] = + def grouped[B >: A](size: Int): GroupedIterator[B]^{this} = new GroupedIterator[B](self, size, size) /** Returns an iterator which presents a "sliding window" view of @@ -377,13 +381,13 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesAndProducesIterator */ - def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B] = + def sliding[B >: A](size: Int, step: Int = 1): GroupedIterator[B]^{this} = new GroupedIterator[B](self, size, step) - def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { + def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B]^{this, op} = new AbstractIterator[B] { // We use an intermediate iterator that iterates through the first element `z` // and then that will be modified to iterate through the collection - private[this] var current: Iterator[B] = + private[this] var current: Iterator[B]^{self, op} = new AbstractIterator[B] { override def knownSize = { val thisSize = self.knownSize @@ -412,7 +416,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } @deprecated("Call scanRight on an Iterable instead.", "2.13.0") - def scanRight[B](z: B)(op: (A, B) => B): Iterator[B] = ArrayBuffer.from(this).scanRight(z)(op).iterator + def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = + ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator + // @uncheckedCaptures is safe since the ArrayBuffer is local temporrary storage def indexWhere(p: A => Boolean, 
from: Int = 0): Int = { var i = math.max(from, 0) @@ -465,11 +471,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite @deprecatedOverriding("isEmpty is defined as !hasNext; override hasNext instead", "2.13.0") override def isEmpty: Boolean = !hasNext - def filter(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = false) + def filter(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = false) - def filterNot(p: A => Boolean): Iterator[A] = filterImpl(p, isFlipped = true) + def filterNot(p: A => Boolean): Iterator[A]^{this, p} = filterImpl(p, isFlipped = true) - private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A] = new AbstractIterator[A] { + private[collection] def filterImpl(p: A => Boolean, isFlipped: Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false @@ -479,9 +485,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite while (p(hd) == isFlipped) { if (!self.hasNext) return false hd = self.next() - } + } hdDefined = true - true + true } def next() = @@ -503,9 +509,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @return an iterator which produces those values of this iterator which satisfy the predicate `p`. 
* @note Reuse: $consumesAndProducesIterator */ - def withFilter(p: A => Boolean): Iterator[A] = filter(p) + def withFilter(p: A => Boolean): Iterator[A]^{this, p} = filter(p) - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { + def collect[B](pf: PartialFunction[A, B]^): Iterator[B]^{this, pf} = new AbstractIterator[B] with (A -> B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ @@ -541,7 +547,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinct: Iterator[A] = distinctBy(identity) + def distinct: Iterator[A]^{this} = distinctBy(identity) /** * Builds a new iterator from this one without any duplicated elements as determined by `==` after applying @@ -553,9 +559,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesIterator */ - def distinctBy[B](f: A => B): Iterator[A] = new AbstractIterator[A] { + def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] { - private[this] val traversedValues = mutable.HashSet.empty[B] + private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures] private[this] var nextElementDefined: Boolean = false private[this] var nextElement: A = _ @@ -578,14 +584,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def map[B](f: A => B): Iterator[B] = new AbstractIterator[B] { + def map[B](f: A => B): Iterator[B]^{this, f} = new AbstractIterator[B] { override def knownSize = self.knownSize def hasNext = self.hasNext def next() = f(self.next()) } - def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new AbstractIterator[B] { - private[this] var cur: Iterator[B] = Iterator.empty + def flatMap[B](f: A => IterableOnce[B]^): Iterator[B]^{this, f} = new AbstractIterator[B] { + private[this] var cur: Iterator[B]^{f} = Iterator.empty 
/** Trillium logic boolean: -1 = unknown, 0 = false, 1 = true */ private[this] var _hasNext: Int = -1 @@ -619,19 +625,19 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def flatten[B](implicit ev: A => IterableOnce[B]): Iterator[B] = + def flatten[B](implicit ev: A -> IterableOnce[B]): Iterator[B]^{this} = flatMap[B](ev) - def concat[B >: A](xs: => IterableOnce[B]): Iterator[B] = new Iterator.ConcatIterator[B](self).concat(xs) + def concat[B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = new Iterator.ConcatIterator[B](self).concat(xs) - @`inline` final def ++ [B >: A](xs: => IterableOnce[B]): Iterator[B] = concat(xs) + @`inline` final def ++ [B >: A](xs: => IterableOnce[B]^): Iterator[B]^{this, xs} = concat(xs) - def take(n: Int): Iterator[A] = sliceIterator(0, n max 0) + def take(n: Int): Iterator[A]^{this} = sliceIterator(0, n max 0) - def takeWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { + def takeWhile(p: A => Boolean): Iterator[A]^{self, p} = new AbstractIterator[A] { private[this] var hd: A = _ private[this] var hdDefined: Boolean = false - private[this] var tail: Iterator[A] = self + private[this] var tail: Iterator[A]^{self} = self def hasNext = hdDefined || tail.hasNext && { hd = tail.next() @@ -642,9 +648,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A] = sliceIterator(n, -1) + def drop(n: Int): Iterator[A]^{this} = sliceIterator(n, -1) - def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { + def dropWhile(p: A => Boolean): Iterator[A]^{this, p} = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator private[this] var status = -1 // Local buffering to avoid double-wrap with .buffered @@ -680,7 +686,7 @@ trait Iterator[+A] extends IterableOnce[A] with 
IterableOnceOps[A, Iterator, Ite * * @note Reuse: $consumesOneAndProducesTwoIterators */ - def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { + def span(p: A => Boolean): (Iterator[A]^{this, p}, Iterator[A]^{this, p}) = { /* * Giving a name to following iterator (as opposed to trailing) because * anonymous class is represented as a structural type that trailing @@ -698,7 +704,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ private[this] var status = 0 private def store(a: A): Unit = { - if (lookahead == null) lookahead = new mutable.Queue[A] + if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures] lookahead += a } def hasNext = { @@ -779,10 +785,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite (leading, trailing) } - def slice(from: Int, until: Int): Iterator[A] = sliceIterator(from, until max 0) + def slice(from: Int, until: Int): Iterator[A]^{this} = sliceIterator(from, until max 0) /** Creates an optionally bounded slice, unbounded if `until` is negative. 
*/ - protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = { val lo = from max 0 val rest = if (until < 0) -1 // unbounded @@ -793,14 +799,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite else new Iterator.SliceIterator(this, lo, rest) } - def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] { + def zip[B](that: IterableOnce[B]^): Iterator[(A, B)]^{this, that} = new AbstractIterator[(A, B)] { val thatIterator = that.iterator override def knownSize = self.knownSize min thatIterator.knownSize def hasNext = self.hasNext && thatIterator.hasNext def next() = (self.next(), thatIterator.next()) } - def zipAll[A1 >: A, B](that: IterableOnce[B], thisElem: A1, thatElem: B): Iterator[(A1, B)] = new AbstractIterator[(A1, B)] { + def zipAll[A1 >: A, B](that: IterableOnce[B]^, thisElem: A1, thatElem: B): Iterator[(A1, B)]^{this, that} = new AbstractIterator[(A1, B)] { val thatIterator = that.iterator override def knownSize = { val thisSize = self.knownSize @@ -817,7 +823,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - def zipWithIndex: Iterator[(A, Int)] = new AbstractIterator[(A, Int)] { + def zipWithIndex: Iterator[(A, Int)]^{this} = new AbstractIterator[(A, Int)] { var idx = 0 override def knownSize = self.knownSize def hasNext = self.hasNext @@ -837,7 +843,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * * @inheritdoc */ - def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { val those = that.iterator while (hasNext && those.hasNext) if (next() != those.next()) @@ -860,9 +866,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * iterated by one iterator but not yet by the other. 
* @note Reuse: $consumesOneAndProducesTwoIterators */ - def duplicate: (Iterator[A], Iterator[A]) = { - val gap = new scala.collection.mutable.Queue[A] - var ahead: Iterator[A] = null + def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = { + val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures] + var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric class Partner extends AbstractIterator[A] { override def knownSize: Int = self.synchronized { val thisSize = self.knownSize @@ -904,7 +910,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @param replaced The number of values in the original iterator that are replaced by the patch. * @note Reuse: $consumesTwoAndProducesOneIterator */ - def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = + def patch[B >: A](from: Int, patchElems: Iterator[B]^, replaced: Int): Iterator[B]^{this, patchElems} = new AbstractIterator[B] { private[this] var origElems = self // > 0 => that many more elems from `origElems` before switching to `patchElems` @@ -944,7 +950,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } } - override def tapEach[U](f: A => U): Iterator[A] = new AbstractIterator[A] { + override def tapEach[U](f: A => U): Iterator[A]^{this, f} = new AbstractIterator[A] { override def knownSize = self.knownSize override def hasNext = self.hasNext override def next() = { @@ -981,7 +987,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam A the type of the collection’s elements * @return a new $coll with the elements of `source` */ - override def from[A](source: IterableOnce[A]): Iterator[A] = source.iterator + override def from[A](source: IterableOnce[A]^): Iterator[A]^{source} = source.iterator /** The iterator which produces no values. 
*/ @`inline` final def empty[T]: Iterator[T] = _empty @@ -1012,7 +1018,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation * @return An iterator that produces the results of `n` evaluations of `elem`. */ - override def fill[A](len: Int)(elem: => A): Iterator[A] = new AbstractIterator[A] { + override def fill[A](len: Int)(elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (len - i) max 0 def hasNext: Boolean = i < len @@ -1027,7 +1033,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f The function computing element values * @return An iterator that produces the values `f(0), ..., f(n -1)`. */ - override def tabulate[A](end: Int)(f: Int => A): Iterator[A] = new AbstractIterator[A] { + override def tabulate[A](end: Int)(f: Int => A): Iterator[A]^{f} = new AbstractIterator[A] { private[this] var i = 0 override def knownSize: Int = (end - i) max 0 def hasNext: Boolean = i < end @@ -1100,7 +1106,7 @@ object Iterator extends IterableFactory[Iterator] { * @param f the function that's repeatedly applied * @return the iterator producing the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[T](start: T)(f: T => T): Iterator[T] = new AbstractIterator[T] { + def iterate[T](start: T)(f: T => T): Iterator[T]^{f} = new AbstractIterator[T] { private[this] var first = true private[this] var acc = start def hasNext: Boolean = true @@ -1122,7 +1128,7 @@ object Iterator extends IterableFactory[Iterator] { * @tparam S Type of the internal state * @return an Iterator that produces elements using `f` until `f` returns `None` */ - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A] = new UnfoldIterator(init)(f) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): Iterator[A]^{f} = new UnfoldIterator(init)(f) /** Creates an infinite-length iterator returning the results of evaluating an expression. 
* The expression is recomputed for every element. @@ -1130,7 +1136,7 @@ object Iterator extends IterableFactory[Iterator] { * @param elem the element computation. * @return the iterator containing an infinite number of results of evaluating `elem`. */ - def continually[A](elem: => A): Iterator[A] = new AbstractIterator[A] { + def continually[A](elem: => A): Iterator[A]^{elem} = new AbstractIterator[A] { def hasNext = true def next() = elem } @@ -1138,9 +1144,10 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator to which other iterators can be appended efficiently. * Nested ConcatIterators are merged to avoid blowing the stack. */ - private final class ConcatIterator[+A](private var current: Iterator[A @uncheckedVariance]) extends AbstractIterator[A] { - private var tail: ConcatIteratorCell[A @uncheckedVariance] = null - private var last: ConcatIteratorCell[A @uncheckedVariance] = null + private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] { + private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from + private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null + private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null private var currentHasNextChecked = false def hasNext = @@ -1194,8 +1201,8 @@ object Iterator extends IterableFactory[Iterator] { current.next() } else Iterator.empty.next() - override def concat[B >: A](that: => IterableOnce[B]): Iterator[B] = { - val c = new ConcatIteratorCell[B](that, null).asInstanceOf[ConcatIteratorCell[A]] + override def concat[B >: A](that: => IterableOnce[B]^): Iterator[B]^{this, that} = { + val c: ConcatIteratorCell[A] = new ConcatIteratorCell[B](that, null).asInstanceOf if (tail == null) { tail = c last = c @@ -1209,14 +1216,14 @@ object Iterator extends IterableFactory[Iterator] { } } - private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A], var tail: 
ConcatIteratorCell[A]) { - def headIterator: Iterator[A] = head.iterator + private[this] final class ConcatIteratorCell[A](head: => IterableOnce[A]^, var tail: ConcatIteratorCell[A @uncheckedCaptures]) { + def headIterator: Iterator[A]^{this} = head.iterator // CC todo: can't use {head} as capture set, gives "cannot establish a reference" } /** Creates a delegating iterator capped by a limit count. Negative limit means unbounded. * Lazily skip to start on first evaluation. Avoids daisy-chained iterators due to slicing. */ - private[scala] final class SliceIterator[A](val underlying: Iterator[A], start: Int, limit: Int) extends AbstractIterator[A] { + private[scala] final class SliceIterator[A](val underlying: Iterator[A]^, start: Int, limit: Int) extends AbstractIterator[A] { private[this] var remaining = limit private[this] var dropping = start @inline private def unbounded = remaining < 0 @@ -1247,7 +1254,7 @@ object Iterator extends IterableFactory[Iterator] { else if (unbounded) underlying.next() else empty.next() } - override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{underlying} = { val lo = from max 0 def adjustedBound = if (unbounded) -1 @@ -1269,9 +1276,9 @@ object Iterator extends IterableFactory[Iterator] { /** Creates an iterator that uses a function `f` to produce elements of * type `A` and update an internal state of type `S`. 
*/ - private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)]) extends AbstractIterator[A] { - private[this] var state: S = init - private[this] var nextResult: Option[(A, S)] = null + private final class UnfoldIterator[A, S](init: S)(f: S => Option[(A, S)])extends AbstractIterator[A] { + private[this] var state: S @uncheckedCaptures = init + private[this] var nextResult: Option[(A, S)] @uncheckedCaptures = null override def hasNext: Boolean = { if (nextResult eq null) { @@ -1297,4 +1304,5 @@ object Iterator extends IterableFactory[Iterator] { } /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ -abstract class AbstractIterator[+A] extends Iterator[A] +abstract class AbstractIterator[+A] extends Iterator[A]: + this: Iterator[A]^ => diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala index 569e4e8c60a7..69130eae1829 100644 --- a/tests/pos-special/stdlib/collection/JavaConverters.scala +++ b/tests/pos-special/stdlib/collection/JavaConverters.scala @@ -17,6 +17,7 @@ import java.{lang => jl, util => ju} import scala.collection.convert._ import scala.language.implicitConversions +import language.experimental.captureChecking /** A variety of decorators that enable converting between * Scala and Java collections using extension methods, `asScala` and `asJava`. diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala index 0553eb8edf7f..1bb4173d219f 100644 --- a/tests/pos-special/stdlib/collection/LazyZipOps.scala +++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala @@ -13,6 +13,7 @@ package scala.collection import scala.language.implicitConversions +import language.experimental.captureChecking /** Decorator representing lazily zipped pairs. * @@ -21,7 +22,7 @@ import scala.language.implicitConversions * * Note: will not terminate for infinite-sized collections. 
*/ -final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1], coll2: Iterable[El2]) { +final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) { /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator. @@ -31,7 +32,7 @@ final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterabl * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported. */ - def lazyZip[B](that: Iterable[B]): LazyZip3[El1, El2, B, C1] = new LazyZip3(src, coll1, coll2, that) + def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that) def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { @@ -147,9 +148,9 @@ object LazyZip2 { * Note: will not terminate for infinite-sized collections. */ final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, - coll1: Iterable[El1], - coll2: Iterable[El2], - coll3: Iterable[El3]) { + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^) { /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator. @@ -159,7 +160,7 @@ final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1, * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples. * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported. 
*/ - def lazyZip[B](that: Iterable[B]): LazyZip4[El1, El2, El3, B, C1] = new LazyZip4(src, coll1, coll2, coll3, that) + def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that) def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { @@ -288,10 +289,10 @@ object LazyZip3 { * Note: will not terminate for infinite-sized collections. */ final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1, - coll1: Iterable[El1], - coll2: Iterable[El2], - coll3: Iterable[El3], - coll4: Iterable[El4]) { + coll1: Iterable[El1]^, + coll2: Iterable[El2]^, + coll3: Iterable[El3]^, + coll4: Iterable[El4]^) { def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = { bf.fromSpecific(src)(new AbstractView[B] { diff --git a/tests/pos-special/stdlib/collection/LinearSeq.scala b/tests/pos-special/stdlib/collection/LinearSeq.scala index 449d58c866e3..393f5fda4187 100644 --- a/tests/pos-special/stdlib/collection/LinearSeq.scala +++ b/tests/pos-special/stdlib/collection/LinearSeq.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.{nowarn, tailrec} +import language.experimental.captureChecking /** Base trait for linearly accessed sequences that have efficient `head` and * `tail` operations. 
@@ -32,7 +33,7 @@ trait LinearSeq[+A] extends Seq[A] object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) /** Base trait for linear Seq operations */ -trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends AnyRef with SeqOps[A, CC, C] { /** @inheritdoc * @@ -96,7 +97,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq else loop(0, coll) } - override def lengthCompare(that: Iterable[_]): Int = { + override def lengthCompare(that: Iterable[_]^): Int = { val thatKnownSize = that.knownSize if (thatKnownSize >= 0) this lengthCompare thatKnownSize @@ -186,7 +187,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq acc } - override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + override def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = (a eq b) || { if (a.nonEmpty && b.nonEmpty && a.head == b.head) { @@ -259,7 +260,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq } } -trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends AnyRef with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { private[this] var current = StrictOptimizedLinearSeqOps.this diff --git a/tests/pos-special/stdlib/collection/Map.scala 
b/tests/pos-special/stdlib/collection/Map.scala index 0fb6df9a06dc..8ab25a3c13e0 100644 --- a/tests/pos-special/stdlib/collection/Map.scala +++ b/tests/pos-special/stdlib/collection/Map.scala @@ -17,13 +17,16 @@ import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable import scala.collection.mutable.StringBuilder import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure /** Base Map type */ trait Map[K, +V] extends Iterable[(K, V)] with MapOps[K, V, Map, Map[K, V]] with MapFactoryDefaults[K, V, Map, Iterable] - with Equals { + with Equals + with Pure { def mapFactory: scala.collection.MapFactory[Map] = Map @@ -101,8 +104,9 @@ trait Map[K, +V] trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends IterableOps[(K, V), Iterable, C] with PartialFunction[K, V] { + this: MapOps[K, V, CC, C]^ => - override def view: MapView[K, V] = new MapView.Id(this) + override def view: MapView[K, V]^{this} = new MapView.Id(this) /** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */ def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = { @@ -131,7 +135,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] /** Similar to `fromIterable`, but returns a Map collection type. * Note that the return type is now `CC[K2, V2]`. */ - @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]): CC[K2, V2] = mapFactory.from(it) + @`inline` protected final def mapFromIterable[K2, V2](it: Iterable[(K2, V2)]^): CC[K2, V2] = mapFactory.from(it) /** The companion object of this map, providing various factory methods. * @@ -251,7 +255,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * the predicate `p`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.filterKeys(f). 
A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. @@ -259,7 +263,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ @deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) /** Defines the default value computation for the map, * returned when a key is not found @@ -318,7 +322,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -328,7 +332,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll which contains all elements * of this $coll followed by all elements of `suffix`. 
*/ - def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { + def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): CC[K, V2] = mapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -336,7 +340,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] // Not final because subclasses refine the result type, e.g. in SortedMap, the result type is // SortedMap's CC, while Map's CC is fixed to Map /** Alias for `concat` */ - /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + /*@`inline` final*/ def ++ [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = iterator.map { case (k, v) => s"$k -> $v" }.addString(sb, start, sep, end) @@ -350,14 +354,14 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") - @`inline` def -- (keys: IterableOnce[K]): C = { + @`inline` def -- (keys: IterableOnce[K]^): C = { lazy val keysSet = keys.iterator.to(immutable.Set) - fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))) + fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure } @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") - def ++: [V1 >: V](that: IterableOnce[(K,V1)]): CC[K,V1] = { - val thatIterable: Iterable[(K, V1)] = that match { + def ++: [V1 >: V](that: IterableOnce[(K,V1)]^): CC[K,V1] = { + val thatIterable: Iterable[(K, V1)]^{that} = that match { case that: Iterable[(K, V1)] => that case that => View.from(that) } @@ -373,17 +377,17 @@ object MapOps { */ @SerialVersionUID(3L) class WithFilter[K, +V, +IterableCC[_], +CC[_, _] 
<: IterableOps[_, AnyConstr, _]]( - self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _], + self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^, p: ((K, V)) => Boolean ) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable { - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.Map(filtered, f)) - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} = self.mapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} = new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala index 7f84178a7c16..ac9e88466052 100644 --- a/tests/pos-special/stdlib/collection/MapView.scala +++ b/tests/pos-special/stdlib/collection/MapView.scala @@ -15,54 +15,57 @@ package scala.collection import scala.annotation.nowarn import scala.collection.MapView.SomeMapOps import scala.collection.mutable.Builder +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure trait MapView[K, +V] extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]] with View[(K, V)] { + this: MapView[K, V]^ => - override def view: MapView[K, V] = this + override def view: MapView[K, V]^{this} = this // Ideally this returns a `View`, but bincompat /** Creates a view over all keys of this map. * * @return the keys of this map as a view. 
*/ - override def keys: Iterable[K] = new MapView.Keys(this) + override def keys: Iterable[K]^{this} = new MapView.Keys(this) // Ideally this returns a `View`, but bincompat /** Creates a view over all values of this map. * * @return the values of this map as a view. */ - override def values: Iterable[V] = new MapView.Values(this) + override def values: Iterable[V]^{this} = new MapView.Values(this) /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. */ - override def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p) + override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p) /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. 
*/ - override def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f) + override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f) - override def filter(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, false, pred) + override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred) - override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V] = new MapView.Filter(this, true, pred) + override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred) - override def partition(p: ((K, V)) => Boolean): (MapView[K, V], MapView[K, V]) = (filter(p), filterNot(p)) + override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p)) - override def tapEach[U](f: ((K, V)) => U): MapView[K, V] = new MapView.TapEach(this, f) + override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f) def mapFactory: MapViewFactory = MapView override def empty: MapView[K, V] = mapFactory.empty - override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l] = new MapOps.WithFilter(this, p) + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p) override def toString: String = super[View].toString @@ -78,7 +81,9 @@ object MapView extends MapViewFactory { type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _] @SerialVersionUID(3L) - private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { + object EmptyMapView extends AbstractMapView[Any, Nothing] { + // !!! 
cc problem: crash when we replace the line with + // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] { override def get(key: Any): Option[Nothing] = None override def iterator: Iterator[Nothing] = Iterator.empty[Nothing] override def knownSize: Int = 0 @@ -91,48 +96,48 @@ object MapView extends MapViewFactory { } @SerialVersionUID(3L) - class Id[K, +V](underlying: SomeMapOps[K, V]) extends AbstractMapView[K, V] { + class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] { def get(key: K): Option[V] = underlying.get(key) - def iterator: Iterator[(K, V)] = underlying.iterator + def iterator: Iterator[(K, V)]^{this} = underlying.iterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } // Ideally this is public, but bincompat @SerialVersionUID(3L) - private class Keys[K](underlying: SomeMapOps[K, _]) extends AbstractView[K] { - def iterator: Iterator[K] = underlying.keysIterator + private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] { + def iterator: Iterator[K]^{this} = underlying.keysIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } // Ideally this is public, but bincompat @SerialVersionUID(3L) - private class Values[+V](underlying: SomeMapOps[_, V]) extends AbstractView[V] { - def iterator: Iterator[V] = underlying.valuesIterator + private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] { + def iterator: Iterator[V]^{this} = underlying.valuesIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class MapValues[K, +V, +W](underlying: SomeMapOps[K, V], f: V => W) extends AbstractMapView[K, W] { - def iterator: Iterator[(K, W)] = underlying.iterator.map(kv => (kv._1, f(kv._2))) + class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends 
AbstractMapView[K, W] { + def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2))) def get(key: K): Option[W] = underlying.get(key).map(f) override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class FilterKeys[K, +V](underlying: SomeMapOps[K, V], p: K => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)] = underlying.iterator.filter { case (k, _) => p(k) } + class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) } def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class Filter[K, +V](underlying: SomeMapOps[K, V], isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { - def iterator: Iterator[(K, V)] = underlying.iterator.filterImpl(p, isFlipped) + class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] { + def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped) def get(key: K): Option[V] = underlying.get(key) match { case s @ Some(v) if p((key, v)) != isFlipped => s case _ => None @@ -142,7 +147,7 @@ object MapView extends MapViewFactory { } @SerialVersionUID(3L) - class TapEach[K, +V, +U](underlying: SomeMapOps[K, V], f: ((K, V)) => U) extends AbstractMapView[K, V] { + class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] { override def get(key: K): Option[V] = { underlying.get(key) match { case s @ Some(v) => @@ -151,18 +156,21 @@ object MapView extends MapViewFactory { case None => None } } - override def iterator: Iterator[(K, V)] = 
underlying.iterator.tapEach(f) + override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f) override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } - override def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) + override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view) override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]] - override def from[K, V](it: IterableOnce[(K, V)]): View[(K, V)] = View.from(it) + override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] = + View.from(it).unsafeAssumePure + // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory, + // and the latter assumes maps are strict, so from's result captures nothing. - override def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] = it match { + override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match { case mv: MapView[K, V] => mv case other => new MapView.Id(other) } @@ -176,12 +184,13 @@ trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y) def empty[X, Y]: MapView[X, Y] - def from[K, V](it: SomeMapOps[K, V]): MapView[K, V] + def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap) } /** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. 
*/ @SerialVersionUID(3L) -abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V] +abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]: + this: AbstractMapView[K, V]^ => diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala index 874a06449aa9..f5139422e24c 100644 --- a/tests/pos-special/stdlib/collection/Searching.scala +++ b/tests/pos-special/stdlib/collection/Searching.scala @@ -14,6 +14,7 @@ package scala.collection import scala.language.implicitConversions import scala.collection.generic.IsSeq +import language.experimental.captureChecking object Searching { diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala index d960838fdcb7..365a1db1b849 100644 --- a/tests/pos-special/stdlib/collection/Seq.scala +++ b/tests/pos-special/stdlib/collection/Seq.scala @@ -16,6 +16,9 @@ import scala.collection.immutable.Range import scala.util.hashing.MurmurHash3 import Searching.{Found, InsertionPoint, SearchResult} import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures /** Base trait for sequence collections * @@ -27,6 +30,7 @@ trait Seq[+A] with SeqOps[A, Seq, Seq[A]] with IterableFactoryDefaults[A, Seq] with Equals { + this: Seq[A] => override def iterableFactory: SeqFactory[Seq] = Seq @@ -74,11 +78,12 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq) * @define coll sequence * @define Coll `Seq` */ -trait SeqOps[+A, +CC[_], +C] extends Any - with IterableOps[A, CC, C] { self => +trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self => override def view: SeqView[A] = new SeqView.Id[A](this) + def iterableFactory: FreeSeqFactory[CC] + /** Get the element at the specified index. This operation is provided for convenience in `Seq`. 
It should * not be assumed to be efficient unless you have an `IndexedSeq`. */ @throws[IndexOutOfBoundsException] @@ -160,13 +165,13 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a new $coll which contains all elements of `prefix` followed * by all the elements of this $coll. */ - def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = iterableFactory.from(prefix match { + def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = iterableFactory.from(prefix match { case prefix: Iterable[B] => new View.Concat(prefix, this) case _ => prefix.iterator ++ iterator }) /** Alias for `prependedAll` */ - @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]): CC[B] = prependedAll(prefix) + @`inline` override final def ++: [B >: A](prefix: IterableOnce[B]^): CC[B] = prependedAll(prefix) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -177,14 +182,15 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a new collection of type `CC[B]` which contains all elements * of this $coll followed by all elements of `suffix`. 
*/ - def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = super.concat(suffix) + def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = + super.concat(suffix).unsafeAssumePure /** Alias for `appendedAll` */ - @`inline` final def :++ [B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + @`inline` final def :++ [B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) // Make `concat` an alias for `appendedAll` so that it benefits from performance // overrides of this method - @`inline` final override def concat[B >: A](suffix: IterableOnce[B]): CC[B] = appendedAll(suffix) + @`inline` final override def concat[B >: A](suffix: IterableOnce[B]^): CC[B] = appendedAll(suffix) /** Produces a new sequence which contains all elements of this $coll and also all elements of * a given sequence. `xs union ys` is equivalent to `xs ++ ys`. @@ -212,7 +218,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @tparam B the type of the elements after being transformed by `f` * @return a new $coll consisting of all the elements of this $coll without duplicates. */ - def distinctBy[B](f: A => B): C = fromSpecific(new View.DistinctBy(this, f)) + def distinctBy[B](f: A -> B): C = fromSpecific(new View.DistinctBy(this, f)) /** Returns new $coll with elements in reversed order. * @@ -231,7 +237,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * * @return an iterator yielding the elements of this $coll in reversed order */ - def reverseIterator: Iterator[A] = reversed.iterator + override def reverseIterator: Iterator[A] = reversed.iterator /** Tests whether this $coll contains the given sequence at a given index. * @@ -243,7 +249,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return `true` if the sequence `that` is contained in this $coll at * index `offset`, otherwise `false`. 
*/ - def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = { + def startsWith[B >: A](that: IterableOnce[B]^, offset: Int = 0): Boolean = { val i = iterator drop offset val j = that.iterator while (j.hasNext && i.hasNext) @@ -258,7 +264,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise. */ - def endsWith[B >: A](that: Iterable[B]): Boolean = { + def endsWith[B >: A](that: Iterable[B]^): Boolean = { if (that.isEmpty) true else { val i = iterator.drop(length - that.size) @@ -595,7 +601,8 @@ trait SeqOps[+A, +CC[_], +C] extends Any if (!hasNext) Iterator.empty.next() - val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms + val forcedElms = new mutable.ArrayBuffer[A @uncheckedCaptures](elms.size) ++= elms + // uncheckedCaptures OK since used only locally val result = (newSpecificBuilder ++= forcedElms).result() var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) @@ -628,6 +635,9 @@ trait SeqOps[+A, +CC[_], +C] extends Any private[this] def init() = { val m = mutable.HashMap[A, Int]() + //val s1 = self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) + //val s2: Seq[(A, Int)] = s1 sortBy (_._2) + //val (es, is) = s2.unzip(using Predef.$conforms[(A, Int)]) val (es, is) = (self.toGenericSeq map (e => (e, m.getOrElseUpdate(e, m.size))) sortBy (_._2)).unzip (es.to(mutable.ArrayBuffer), is.toArray) @@ -807,7 +817,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any */ def lengthCompare(len: Int): Int = super.sizeCompare(len) - override final def sizeCompare(that: Iterable[_]): Int = lengthCompare(that) + override final def sizeCompare(that: Iterable[_]^): Int = lengthCompare(that) /** Compares the length of this $coll to the size of another `Iterable`. * @@ -822,7 +832,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * is `O(this.length min that.size)` instead of `O(this.length + that.size)`. 
* The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. */ - def lengthCompare(that: Iterable[_]): Int = super.sizeCompare(that) + def lengthCompare(that: Iterable[_]^): Int = super.sizeCompare(that) /** Returns a value class containing operations for comparing the length of this $coll to a test value. * @@ -845,7 +855,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any /** Are the elements of this collection the same (and in the same order) * as those of `that`? */ - def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + def sameElements[B >: A](that: IterableOnce[B]^): Boolean = { val thisKnownSize = knownSize val knownSizeDifference = thisKnownSize != -1 && { val thatKnownSize = that.knownSize @@ -883,7 +893,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * part of the result, but any following occurrences will. */ def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = false occ.updateWith(x) { @@ -908,7 +918,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * in the result, but any following occurrences will be omitted. */ def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) fromSpecific(iterator.filter { x => var include = true occ.updateWith(x) { @@ -937,7 +947,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any * except that `replaced` elements starting from `from` are replaced * by all the elements of `other`. */ - def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = iterableFactory.from(new View.Patched(this, from, other, replaced)) /** A copy of this $coll with one single replaced element. 
@@ -956,7 +966,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any iterableFactory.from(new View.Updated(this, index, elem)) } - protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { + protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = { val occ = new mutable.HashMap[B, Int]() for (y <- sq) occ.updateWith(y) { case None => Some(1) @@ -1004,11 +1014,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any * @return a `Found` value containing the index corresponding to the element in the * sequence, or the `InsertionPoint` where the element would be inserted if * the element is not in the sequence. - * + * * @note if `to <= from`, the search space is empty, and an `InsertionPoint` at `from` * is returned */ - def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = + def search[B >: A](elem: B, from: Int, to: Int) (implicit ord: Ordering[B]): SearchResult = linearSearch(view.slice(from, to), elem, math.max(0, from))(ord) private[this] def linearSearch[B >: A](c: View[A], elem: B, offset: Int) diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala index 05bf126aba02..a7f2c629b61d 100644 --- a/tests/pos-special/stdlib/collection/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/SeqMap.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking import scala.annotation.nowarn diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala index ad16f01b9184..a4ca1143f8b4 100644 --- a/tests/pos-special/stdlib/collection/SeqView.scala +++ b/tests/pos-special/stdlib/collection/SeqView.scala @@ -14,26 +14,49 @@ package scala package collection import scala.annotation.nowarn +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure +import scala.annotation.unchecked.uncheckedCaptures + +/** !!! 
Scala 2 difference: Need intermediate trait SeqViewOps to collect the + * necessary functionality over which SeqViews are defined, and at the same + * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is + * pure, whereas SeqViews are Iterables which can be impure (for instance, + * mapping a SeqView with an impure function gives an impure view). + */ +trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { + self: SeqViewOps[A, CC, C]^ => + + def length: Int + def apply(x: Int): A + def appended[B >: A](elem: B): CC[B]^{this} + def prepended[B >: A](elem: B): CC[B]^{this} + def reverse: C^{this} + def sorted[B >: A](implicit ord: Ordering[B]): C^{this} + + def reverseIterator: Iterator[A]^{this} = reversed.iterator +} +trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] { + self: SeqView[A]^ => -trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { - override def view: SeqView[A] = this + override def view: SeqView[A]^{this} = this - override def map[B](f: A => B): SeqView[B] = new SeqView.Map(this, f) - override def appended[B >: A](elem: B): SeqView[B] = new SeqView.Appended(this, elem) - override def prepended[B >: A](elem: B): SeqView[B] = new SeqView.Prepended(elem, this) - override def reverse: SeqView[A] = new SeqView.Reverse(this) - override def take(n: Int): SeqView[A] = new SeqView.Take(this, n) - override def drop(n: Int): SeqView[A] = new SeqView.Drop(this, n) - override def takeRight(n: Int): SeqView[A] = new SeqView.TakeRight(this, n) - override def dropRight(n: Int): SeqView[A] = new SeqView.DropRight(this, n) - override def tapEach[U](f: A => U): SeqView[A] = new SeqView.Map(this, { (a: A) => f(a); a }) + override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f) + override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem) + override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this) + override 
def reverse: SeqView[A]^{this} = new SeqView.Reverse(this) + override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n) + override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n) + override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n) + override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n) + override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a }) - def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) - def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(this, suffix) - def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B] = new SeqView.Concat(prefix, this) + def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix) + def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this) - override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A] = new SeqView.Sorted(this, ord) + override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix: String = "SeqView" @@ -42,38 +65,38 @@ trait SeqView[+A] extends SeqOps[A, View, View[A]] with View[A] { object SeqView { /** A `SeqOps` whose collection type and collection type constructor are unknown */ - private type SomeSeqOps[+A] = SeqOps[A, AnyConstr, _] + private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _] /** A view that doesn’t apply any transformation to an underlying sequence */ @SerialVersionUID(3L) - class Id[+A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + class Id[+A](underlying: SomeSeqOps[A]^) extends 
AbstractSeqView[A] { def apply(idx: Int): A = underlying.apply(idx) def length: Int = underlying.length - def iterator: Iterator[A] = underlying.iterator + def iterator: Iterator[A]^{this} = underlying.iterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeSeqOps[A], f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { + class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] { def apply(idx: Int): B = f(underlying(idx)) def length: Int = underlying.length } @SerialVersionUID(3L) - class Appended[+A](underlying: SomeSeqOps[A], elem: A) extends View.Appended(underlying, elem) with SeqView[A] { + class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] { def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx) def length: Int = underlying.length + 1 } @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeSeqOps[A]) extends View.Prepended(elem, underlying) with SeqView[A] { + class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] { def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1) def length: Int = underlying.length + 1 } @SerialVersionUID(3L) - class Concat[A](prefix: SomeSeqOps[A], suffix: SomeSeqOps[A]) extends View.Concat[A](prefix, suffix) with SeqView[A] { + class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] { def apply(idx: Int): A = { val l = prefix.length if (idx < l) prefix(idx) else suffix(idx - l) @@ -82,16 +105,16 @@ object SeqView { } @SerialVersionUID(3L) - class Reverse[A](underlying: SomeSeqOps[A]) extends AbstractSeqView[A] { + class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] { def apply(i: Int) = 
underlying.apply(size - 1 - i) def length = underlying.size - def iterator: Iterator[A] = underlying.reverseIterator + def iterator: Iterator[A]^{this} = underlying.reverseIterator override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class Take[+A](underlying: SomeSeqOps[A], n: Int) extends View.Take(underlying, n) with SeqView[A] { + class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] { def apply(idx: Int): A = if (idx < n) { underlying(idx) } else { @@ -101,7 +124,7 @@ object SeqView { } @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeSeqOps[A], n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { + class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] { private[this] val delta = (underlying.size - (n max 0)) max 0 def length = underlying.size - delta @throws[IndexOutOfBoundsException] @@ -109,15 +132,15 @@ object SeqView { } @SerialVersionUID(3L) - class Drop[A](underlying: SomeSeqOps[A], n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { + class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] { def length = (underlying.size - normN) max 0 @throws[IndexOutOfBoundsException] def apply(i: Int) = underlying.apply(i + normN) - override def drop(n: Int): SeqView[A] = new Drop(underlying, this.n + n) + override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n) } @SerialVersionUID(3L) - class DropRight[A](underlying: SomeSeqOps[A], n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { + class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] { private[this] val len = (underlying.size - (n max 0)) max 0 def length = len @throws[IndexOutOfBoundsException] @@ -125,15 +148,15 @@ object SeqView { } @SerialVersionUID(3L) - class 
Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A], + class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^, private[this] val len: Int, ord: Ordering[B]) extends SeqView[A] { - outer => + outer: Sorted[A, B]^ => // force evaluation immediately by calling `length` so infinite collections // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls - def this(underlying: SomeSeqOps[A], ord: Ordering[B]) = this(underlying, underlying.length, ord) + def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord) @SerialVersionUID(3L) private[this] class ReverseSorted extends SeqView[A] { @@ -141,15 +164,15 @@ object SeqView { def apply(i: Int): A = _reversed.apply(i) def length: Int = len - def iterator: Iterator[A] = Iterator.empty ++ _reversed.iterator // very lazy + def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy override def knownSize: Int = len override def isEmpty: Boolean = len == 0 override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory) - override def reverse: SeqView[A] = outer - override protected def reversed: Iterable[A] = outer + override def reverse: SeqView[A]^{outer} = outer + override protected def reversed: Iterable[A] = outer.unsafeAssumePure - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = - if (ord1 == Sorted.this.ord) outer + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = + if (ord1 == Sorted.this.ord) outer.unsafeAssumePure else if (ord1.isReverseOf(Sorted.this.ord)) this else new Sorted(elems, len, ord1) } @@ -173,7 +196,7 @@ object SeqView { // contains items of another type, we'd get a CCE anyway) // - the cast doesn't actually do anything in the runtime because the // type of A is not known and Array[_] is Array[AnyRef] - immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A]]) + 
immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]]) } } evaluated = true @@ -181,14 +204,14 @@ object SeqView { res } - private[this] def elems: SomeSeqOps[A] = { + private[this] def elems: SomeSeqOps[A]^{this} = { val orig = underlying if (evaluated) _sorted else orig } def apply(i: Int): A = _sorted.apply(i) def length: Int = len - def iterator: Iterator[A] = Iterator.empty ++ _sorted.iterator // very lazy + def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy override def knownSize: Int = len override def isEmpty: Boolean = len == 0 override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory) @@ -197,7 +220,7 @@ object SeqView { // so this is acceptable for `reversed` override protected def reversed: Iterable[A] = new ReverseSorted - override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A] = + override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} = if (ord1 == this.ord) this else if (ord1.isReverseOf(this.ord)) reverse else new Sorted(elems, len, ord1) diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala index 0ea1e5689473..a9c279b82a49 100644 --- a/tests/pos-special/stdlib/collection/Set.scala +++ b/tests/pos-special/stdlib/collection/Set.scala @@ -17,6 +17,7 @@ import scala.util.hashing.MurmurHash3 import java.lang.String import scala.annotation.nowarn +import language.experimental.captureChecking /** Base trait for set collections. 
*/ @@ -24,7 +25,9 @@ trait Set[A] extends Iterable[A] with SetOps[A, Set, Set[A]] with Equals - with IterableFactoryDefaults[A, Set] { + with IterableFactoryDefaults[A, Set] + with Pure { + self: Set[A] => def canEqual(that: Any) = true @@ -86,8 +89,7 @@ trait Set[A] * @define Coll `Set` */ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] - extends IterableOps[A, CC, C] - with (A => Boolean) { + extends IterableOps[A, CC, C], (A -> Boolean) { self => def contains(elem: A): Boolean @@ -234,7 +236,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] case that: collection.Iterable[A] => new View.Concat(this, that) case _ => iterator.concat(that.iterator) }) - } + } @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala index 03ab0bb0dadc..7b9381ebb078 100644 --- a/tests/pos-special/stdlib/collection/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/SortedMap.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking /** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ trait SortedMap[K, +V] @@ -49,7 +50,8 @@ trait SortedMap[K, +V] trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] extends MapOps[K, V, Map, C] - with SortedOps[K, C] { + with SortedOps[K, C] + with Pure { /** The companion object of this sorted map, providing various factory methods. 
* @@ -176,13 +178,13 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(new View.Collect(this, pf)) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) })(ordering) /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) @@ -206,10 +208,10 @@ object SortedMapOps { def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = self.sortedMapFactory.from(new View.Map(filtered, f)) - def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = self.sortedMapFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} = new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) } diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala index 64e6376be042..16751d86d9d5 100644 --- a/tests/pos-special/stdlib/collection/SortedOps.scala +++ 
b/tests/pos-special/stdlib/collection/SortedOps.scala @@ -12,6 +12,7 @@ package scala.collection +import language.experimental.captureChecking /** Base trait for sorted collections */ trait SortedOps[A, +C] { diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala index c98ca9ae5523..fb2f879edcd2 100644 --- a/tests/pos-special/stdlib/collection/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/SortedSet.scala @@ -14,6 +14,7 @@ package scala.collection import scala.annotation.{implicitNotFound, nowarn} import scala.annotation.unchecked.uncheckedVariance +import language.experimental.captureChecking /** Base type of sorted sets */ trait SortedSet[A] extends Set[A] @@ -68,7 +69,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * @param start The lower-bound (inclusive) of the iterator */ def iteratorFrom(start: A): Iterator[A] - + @deprecated("Use `iteratorFrom` instead.", "2.13.0") @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start) @@ -178,7 +179,7 @@ object SortedSetOps { def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] = self.sortedIterableFactory.from(new View.FlatMap(filtered, f)) - override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC] = + override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} = new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a)) } diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala index 0eeb8a44cb72..0a0ac0075990 100644 --- a/tests/pos-special/stdlib/collection/Stepper.scala +++ b/tests/pos-special/stdlib/collection/Stepper.scala @@ -15,6 +15,7 @@ package scala.collection import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer} import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator} import java.{lang => jl} +import language.experimental.captureChecking 
import scala.collection.Stepper.EfficientSplit @@ -38,6 +39,8 @@ import scala.collection.Stepper.EfficientSplit * @tparam A the element type of the Stepper */ trait Stepper[@specialized(Double, Int, Long) +A] { + this: Stepper[A]^ => + /** Check if there's an element available. */ def hasStep: Boolean @@ -183,9 +186,11 @@ object Stepper { /** A Stepper for arbitrary element types. See [[Stepper]]. */ trait AnyStepper[+A] extends Stepper[A] { + this: AnyStepper[A]^ => + def trySplit(): AnyStepper[A] - def spliterator[B >: A]: Spliterator[B] = new AnyStepper.AnyStepperSpliterator(this) + def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this) def javaIterator[B >: A]: JIterator[B] = new JIterator[B] { def hasNext: Boolean = hasStep @@ -194,10 +199,10 @@ trait AnyStepper[+A] extends Stepper[A] { } object AnyStepper { - class AnyStepperSpliterator[A](s: AnyStepper[A]) extends Spliterator[A] { + class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] { def tryAdvance(c: Consumer[_ >: A]): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false - def trySplit(): Spliterator[A] = { + def trySplit(): Spliterator[A]^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -253,9 +258,11 @@ object AnyStepper { /** A Stepper for Ints. See [[Stepper]]. 
*/ trait IntStepper extends Stepper[Int] { + this: IntStepper^ => + def trySplit(): IntStepper - def spliterator[B >: Int]: Spliterator.OfInt = new IntStepper.IntStepperSpliterator(this) + def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this) def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt { def hasNext: Boolean = hasStep @@ -263,7 +270,7 @@ trait IntStepper extends Stepper[Int] { } } object IntStepper { - class IntStepperSpliterator(s: IntStepper) extends Spliterator.OfInt { + class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt { def tryAdvance(c: IntConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -272,7 +279,7 @@ object IntStepper { case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfInt = { + override def trySplit(): Spliterator.OfInt^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -291,18 +298,19 @@ object IntStepper { /** A Stepper for Doubles. See [[Stepper]]. 
*/ trait DoubleStepper extends Stepper[Double] { + this: DoubleStepper^ => def trySplit(): DoubleStepper - def spliterator[B >: Double]: Spliterator.OfDouble = new DoubleStepper.DoubleStepperSpliterator(this) + def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this) - def javaIterator[B >: Double]: PrimitiveIterator.OfDouble = new PrimitiveIterator.OfDouble { + def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble { def hasNext: Boolean = hasStep def nextDouble(): Double = nextStep() } } object DoubleStepper { - class DoubleStepperSpliterator(s: DoubleStepper) extends Spliterator.OfDouble { + class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble { def tryAdvance(c: DoubleConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -311,7 +319,7 @@ object DoubleStepper { case _ => if (s.hasStep) { c.accept(java.lang.Double.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfDouble = { + override def trySplit(): Spliterator.OfDouble^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } @@ -330,18 +338,20 @@ object DoubleStepper { /** A Stepper for Longs. See [[Stepper]]. 
*/ trait LongStepper extends Stepper[Long] { - def trySplit(): LongStepper + this: LongStepper^ => + + def trySplit(): LongStepper^{this} - def spliterator[B >: Long]: Spliterator.OfLong = new LongStepper.LongStepperSpliterator(this) + def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this) - def javaIterator[B >: Long]: PrimitiveIterator.OfLong = new PrimitiveIterator.OfLong { + def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong { def hasNext: Boolean = hasStep def nextLong(): Long = nextStep() } } object LongStepper { - class LongStepperSpliterator(s: LongStepper) extends Spliterator.OfLong { + class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong { def tryAdvance(c: LongConsumer): Boolean = if (s.hasStep) { c.accept(s.nextStep()); true } else false // Override for efficiency: don't wrap the function and call the `tryAdvance` overload @@ -350,7 +360,7 @@ object LongStepper { case _ => if (s.hasStep) { c.accept(java.lang.Long.valueOf(s.nextStep())); true } else false } // override required for dotty#6152 - override def trySplit(): Spliterator.OfLong = { + override def trySplit(): Spliterator.OfLong^{this} = { val sp = s.trySplit() if (sp == null) null else sp.spliterator } diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala index 6712073b09e4..c6b520400d89 100644 --- a/tests/pos-special/stdlib/collection/StepperShape.scala +++ b/tests/pos-special/stdlib/collection/StepperShape.scala @@ -14,12 +14,13 @@ package scala.collection import java.{lang => jl} +import language.experimental.captureChecking import scala.collection.Stepper.EfficientSplit /** An implicit StepperShape instance is used in the [[IterableOnce.stepper]] to return a possibly * specialized Stepper `S` according to the element type `T`. 
*/ -sealed trait StepperShape[-T, S <: Stepper[_]] { +sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure { /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */ def shape: StepperShape.Shape diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala index a09766cfa912..5b504a2469b5 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedIterableOps.scala @@ -16,6 +16,7 @@ package collection import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics +import language.experimental.captureChecking /** * Trait that overrides iterable operations to take advantage of strict builders. @@ -27,6 +28,7 @@ import scala.runtime.Statics trait StrictOptimizedIterableOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { + this: StrictOptimizedIterableOps[A, CC, C] => // Optimized, push-based version of `partition` override def partition(p: A => Boolean): (C, C) = { @@ -55,7 +57,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (CC[A1], CC[A2]) = { val first = iterableFactory.newBuilder[A1] val second = iterableFactory.newBuilder[A2] foreach { a => @@ -66,7 +68,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] (first.result(), second.result()) } - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { val b1 = iterableFactory.newBuilder[A1] val b2 = iterableFactory.newBuilder[A2] val b3 = iterableFactory.newBuilder[A3] @@ -102,7 +104,7 @@ trait 
StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatMap[B](f: A => IterableOnce[B]): CC[B] = + override def flatMap[B](f: A => IterableOnce[B]^): CC[B] = strictOptimizedFlatMap(iterableFactory.newBuilder, f) /** @@ -112,7 +114,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatMap[B, C2](b: mutable.Builder[B, C2], f: A => IterableOnce[B]^): C2 = { val it = iterator while (it.hasNext) { b ++= f(it.next()) @@ -127,13 +129,13 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B], b: mutable.Builder[B, C2]): C2 = { + @inline protected[this] final def strictOptimizedConcat[B >: A, C2](that: IterableOnce[B]^, b: mutable.Builder[B, C2]): C2 = { b ++= this b ++= that b.result() } - override def collect[B](pf: PartialFunction[A, B]): CC[B] = + override def collect[B](pf: PartialFunction[A, B]^): CC[B] = strictOptimizedCollect(iterableFactory.newBuilder, pf) /** @@ -143,7 +145,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[String]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]): C2 = { + @inline protected[this] final def strictOptimizedCollect[B, C2](b: mutable.Builder[B, C2], pf: PartialFunction[A, B]^): C2 = { val marker = Statics.pfMarker val it = iterator while (it.hasNext) { @@ -154,7 +156,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def flatten[B](implicit toIterableOnce: A => IterableOnce[B]): CC[B] = + override def flatten[B](implicit toIterableOnce: A -> IterableOnce[B]): CC[B] = strictOptimizedFlatten(iterableFactory.newBuilder) /** @@ -164,7 +166,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. `List[Int]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A => IterableOnce[B]): C2 = { + @inline protected[this] final def strictOptimizedFlatten[B, C2](b: mutable.Builder[B, C2])(implicit toIterableOnce: A -> IterableOnce[B]): C2 = { val it = iterator while (it.hasNext) { b ++= toIterableOnce(it.next()) @@ -172,7 +174,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] b.result() } - override def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = + override def zip[B](that: IterableOnce[B]^): CC[(A @uncheckedVariance, B)] = strictOptimizedZip(that, iterableFactory.newBuilder[(A, B)]) /** @@ -182,7 +184,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] * @tparam C2 Type of the resulting collection (e.g. 
`List[(Int, String)]`) * @return The resulting collection */ - @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B], b: mutable.Builder[(A, B), C2]): C2 = { + @inline protected[this] final def strictOptimizedZip[B, C2](that: IterableOnce[B]^, b: mutable.Builder[(A, B), C2]): C2 = { val it1 = iterator val it2 = that.iterator while (it1.hasNext && it2.hasNext) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala index 1f5791bbb718..a9c5e0af43b3 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** * Trait that overrides map operations to take advantage of strict builders. @@ -22,15 +23,16 @@ package scala.collection */ trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] extends MapOps[K, V, CC, C] - with StrictOptimizedIterableOps[(K, V), Iterable, C] { + with StrictOptimizedIterableOps[(K, V), Iterable, C] + with Pure { override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = strictOptimizedMap(mapFactory.newBuilder, f) - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] = strictOptimizedFlatMap(mapFactory.newBuilder, f) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = strictOptimizedConcat(suffix, mapFactory.newBuilder) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala index 396e53885081..bfea9eda8bd3 100644 --- 
a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala @@ -11,6 +11,8 @@ */ package scala.collection +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations on sequences in order @@ -21,9 +23,9 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] with SeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A => B): C = { + override def distinctBy[B](f: A -> B): C = { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B] + val seen = mutable.HashSet.empty[B @uncheckedCaptures] val it = this.iterator while (it.hasNext) { val next = it.next() @@ -52,10 +54,10 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def appendedAll[B >: A](suffix: IterableOnce[B]): CC[B] = + override def appendedAll[B >: A](suffix: IterableOnce[B]^): CC[B] = strictOptimizedConcat(suffix, iterableFactory.newBuilder) - override def prependedAll[B >: A](prefix: IterableOnce[B]): CC[B] = { + override def prependedAll[B >: A](prefix: IterableOnce[B]^): CC[B] = { val b = iterableFactory.newBuilder[B] b ++= prefix b ++= this @@ -78,7 +80,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def diff[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) coll else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { @@ -96,7 +98,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] override def intersect[B >: A](that: Seq[B]): C = if (isEmpty || that.isEmpty) empty else { - val occ = occCounts(that) + val occ = occCounts[B @uncheckedCaptures](that) val b = newSpecificBuilder for (x <- this) { occ.updateWith(x) { diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala index 
356bd2883578..8ed337fff998 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** * Trait that overrides set operations to take advantage of strict builders. diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala index 1beaf1662abe..9a9e6e367922 100644 --- a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala +++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala @@ -13,6 +13,7 @@ package scala.collection import scala.annotation.implicitNotFound +import language.experimental.captureChecking /** * Trait that overrides sorted map operations to take advantage of strict builders. @@ -32,7 +33,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOp override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) - override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering)) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala index f0be485af8ae..3e3e2f8d872e 100644 --- a/tests/pos-special/stdlib/collection/StringOps.scala +++ b/tests/pos-special/stdlib/collection/StringOps.scala @@ -22,6 +22,7 @@ import scala.collection.mutable.StringBuilder import scala.math.{ScalaNumber, max, min} import scala.reflect.ClassTag import 
scala.util.matching.Regex +import language.experimental.captureChecking object StringOps { // just statics for companion class. @@ -123,7 +124,7 @@ object StringOps { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -155,7 +156,7 @@ object StringOps { } /** Creates a new non-strict filter which combines this filter with the given predicate. */ - def withFilter(q: Char => Boolean): WithFilter = new WithFilter(a => p(a) && q(a), s) + def withFilter(q: Char => Boolean): WithFilter^{p, q} = new WithFilter(a => p(a) && q(a), s) } /** Avoid an allocation in [[collect]]. */ @@ -238,7 +239,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection resulting from applying the given collection-valued function * `f` to each char of this string and concatenating the results. */ - def flatMap[B](f: Char => IterableOnce[B]): immutable.IndexedSeq[B] = { + def flatMap[B](f: Char => IterableOnce[B]^): immutable.IndexedSeq[B] = { val len = s.length val b = immutable.IndexedSeq.newBuilder[B] var i = 0 @@ -313,7 +314,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new collection which contains all chars * of this string followed by all elements of `suffix`. 
*/ - def concat[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = { + def concat[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = suffix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -329,7 +330,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string which contains all chars * of this string followed by all chars of `suffix`. */ - def concat(suffix: IterableOnce[Char]): String = { + def concat(suffix: IterableOnce[Char]^): String = { val k = suffix.knownSize val sb = new JStringBuilder(s.length + (if(k >= 0) k else 16)) sb.append(s) @@ -347,10 +348,10 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def concat(suffix: String): String = s + suffix /** Alias for `concat` */ - @`inline` def ++[B >: Char](suffix: Iterable[B]): immutable.IndexedSeq[B] = concat(suffix) + @`inline` def ++[B >: Char](suffix: Iterable[B]^): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `concat` */ - @`inline` def ++(suffix: IterableOnce[Char]): String = concat(suffix) + @`inline` def ++(suffix: IterableOnce[Char]^): String = concat(suffix) /** Alias for `concat` */ def ++(xs: String): String = concat(xs) @@ -422,7 +423,7 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def +: (c: Char): String = prepended(c) /** A copy of the string with all elements from a collection prepended */ - def prependedAll[B >: Char](prefix: IterableOnce[B]): immutable.IndexedSeq[B] = { + def prependedAll[B >: Char](prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = { val b = immutable.IndexedSeq.newBuilder[B] val k = prefix.knownSize b.sizeHint(s.length + (if(k >= 0) k else 16)) @@ -432,7 +433,7 @@ final class StringOps(private val s: String) extends AnyVal { } /** Alias for `prependedAll` */ - @`inline` def ++: [B >: Char] (prefix: IterableOnce[B]): immutable.IndexedSeq[B] = prependedAll(prefix) + @`inline` def ++: [B >: 
Char] (prefix: IterableOnce[B]^): immutable.IndexedSeq[B] = prependedAll(prefix) /** A copy of the string with another string prepended */ def prependedAll(prefix: String): String = prefix + s @@ -460,11 +461,11 @@ final class StringOps(private val s: String) extends AnyVal { @`inline` def :+ (c: Char): String = appended(c) /** A copy of the string with all elements from a collection appended */ - @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = + @`inline` def appendedAll[B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = concat(suffix) /** Alias for `appendedAll` */ - @`inline` def :++ [B >: Char](suffix: IterableOnce[B]): immutable.IndexedSeq[B] = + @`inline` def :++ [B >: Char](suffix: IterableOnce[B]^): immutable.IndexedSeq[B] = concat(suffix) /** A copy of the string with another string appended */ @@ -486,7 +487,7 @@ final class StringOps(private val s: String) extends AnyVal { * except that `replaced` chars starting from `from` are replaced * by `other`. */ - def patch[B >: Char](from: Int, other: IterableOnce[B], replaced: Int): immutable.IndexedSeq[B] = { + def patch[B >: Char](from: Int, other: IterableOnce[B]^, replaced: Int): immutable.IndexedSeq[B] = { val len = s.length @`inline` def slc(off: Int, length: Int): WrappedString = new WrappedString(s.substring(off, off+length)) @@ -515,7 +516,7 @@ final class StringOps(private val s: String) extends AnyVal { * by `other`. * @note $unicodeunaware */ - def patch(from: Int, other: IterableOnce[Char], replaced: Int): String = + def patch(from: Int, other: IterableOnce[Char]^, replaced: Int): String = patch(from, other.iterator.mkString, replaced) /** Produces a new string where a slice of characters in this string is replaced by another string. 
@@ -963,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal { else if (s.equalsIgnoreCase("false")) false else throw new IllegalArgumentException("For input string: \""+s+"\"") - def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] = + def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] = if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]] else new WrappedString(s).toArray[B] @@ -1195,7 +1196,7 @@ final class StringOps(private val s: String) extends AnyVal { * All these operations apply to those chars of this string * which satisfy the predicate `p`. */ - def withFilter(p: Char => Boolean): StringOps.WithFilter = new StringOps.WithFilter(p, s) + def withFilter(p: Char => Boolean): StringOps.WithFilter^{p} = new StringOps.WithFilter(p, s) /** The rest of the string without its first char. * @note $unicodeunaware @@ -1246,7 +1247,7 @@ final class StringOps(private val s: String) extends AnyVal { def inits: Iterator[String] = iterateUntilEmpty(_.init) // A helper for tails and inits. - private[this] def iterateUntilEmpty(f: String => String): Iterator[String] = + private[this] def iterateUntilEmpty(f: String => String): Iterator[String]^{f} = Iterator.iterate(s)(f).takeWhile(x => !x.isEmpty) ++ Iterator.single("") /** Selects all chars of this string which satisfy a predicate. */ @@ -1464,7 +1465,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. 
*/ - def lazyZip[B](that: Iterable[B]): LazyZip2[Char, B, String] = new LazyZip2(s, new WrappedString(s), that) + def lazyZip[B](that: Iterable[B]^): LazyZip2[Char, B, String]^{that} = new LazyZip2(s, new WrappedString(s), that) /* ************************************************************************************************************ @@ -1512,7 +1513,7 @@ final class StringOps(private val s: String) extends AnyVal { * @return a new string consisting of all the chars of this string without duplicates. * @note $unicodeunaware */ - def distinctBy[B](f: Char => B): String = new WrappedString(s).distinctBy(f).unwrap + def distinctBy[B](f: Char -> B): String = new WrappedString(s).distinctBy(f).unwrap /** Sorts the characters of this string according to an Ordering. * diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala index 5479a58d485f..47281815da71 100644 --- a/tests/pos-special/stdlib/collection/StringParsers.scala +++ b/tests/pos-special/stdlib/collection/StringParsers.scala @@ -14,6 +14,7 @@ package scala package collection import scala.annotation.tailrec +import language.experimental.captureChecking /** A module containing the implementations of parsers from strings to numeric types, and boolean */ @@ -34,7 +35,7 @@ private[scala] object StringParsers { @inline private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = { @tailrec - def rec(i: Int, agg: Int): Option[Int] = + def rec(i: Int, agg: Int): Option[Int] = if (agg < min) None else if (i == len) { if (!isPositive) Some(agg) @@ -131,11 +132,11 @@ private[scala] object StringParsers { else None } } - + final def parseLong(from: String): Option[Long] = { //like parseInt, but Longer val len = from.length() - + @tailrec def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = { if (i == len) { @@ -166,7 +167,7 @@ private[scala] object StringParsers { else None } } - 
+ //floating point final def checkFloatFormat(format: String): Boolean = { //indices are tracked with a start index which points *at* the first index @@ -192,7 +193,7 @@ private[scala] object StringParsers { else i rec(from) } - + def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') || @@ -231,7 +232,7 @@ private[scala] object StringParsers { val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex) (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex) } - + def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = { //invariant: endIndex > startIndex @@ -278,7 +279,7 @@ private[scala] object StringParsers { //count 0x00 to 0x20 as "whitespace", and nothing else val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20) val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1 - + if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false else { //all formats can have a sign @@ -305,7 +306,7 @@ private[scala] object StringParsers { } } } - + @inline def parseFloat(from: String): Option[Float] = if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from)) diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala index 441790c3c6e5..d91fc0c49939 100644 --- a/tests/pos-special/stdlib/collection/View.scala +++ b/tests/pos-special/stdlib/collection/View.scala @@ -15,6 +15,8 @@ package scala.collection import scala.annotation.{nowarn, tailrec} import scala.collection.mutable.{ArrayBuffer, Builder} import scala.collection.immutable.LazyList +import scala.annotation.unchecked.uncheckedCaptures +import language.experimental.captureChecking /** Views are collections whose transformation operations are non strict: the resulting elements * are evaluated only when the view is effectively traversed (e.g. 
using `foreach` or `foldLeft`), @@ -23,8 +25,9 @@ import scala.collection.immutable.LazyList * @define Coll `View` */ trait View[+A] extends Iterable[A] with IterableOps[A, View, View[A]] with IterableFactoryDefaults[A, View] with Serializable { + this: View[A]^ => - override def view: View[A] = this + override def view: View[A]^{this} = this override def iterableFactory: IterableFactory[View] = View @@ -55,8 +58,8 @@ object View extends IterableFactory[View] { * * @tparam A View element type */ - def fromIteratorProvider[A](it: () => Iterator[A]): View[A] = new AbstractView[A] { - def iterator = it() + def fromIteratorProvider[A](it: () => Iterator[A]^): View[A]^{it} = new AbstractView[A] { + def iterator: Iterator[A]^{it} = it() } /** @@ -67,7 +70,7 @@ object View extends IterableFactory[View] { * * @tparam E View element type */ - def from[E](it: IterableOnce[E]): View[E] = it match { + def from[E](it: IterableOnce[E]^): View[E]^{it} = it match { case it: View[E] => it case it: Iterable[E] => View.fromIteratorProvider(() => it.iterator) case _ => LazyList.from(it).view @@ -75,7 +78,7 @@ object View extends IterableFactory[View] { def empty[A]: View[A] = Empty - def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) + def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from) override def apply[A](xs: A*): View[A] = new Elems(xs: _*) @@ -97,7 +100,7 @@ object View extends IterableFactory[View] { /** A view with given elements */ @SerialVersionUID(3L) - class Elems[A](xs: A*) extends AbstractView[A] { + class Elems[A](xs: A*) extends AbstractView[A], Pure { def iterator = xs.iterator override def knownSize = xs.knownSize override def isEmpty: Boolean = xs.isEmpty @@ -106,7 +109,7 @@ object View extends IterableFactory[View] { /** A view containing the results of some element computation a number of times. 
*/ @SerialVersionUID(3L) class Fill[A](n: Int)(elem: => A) extends AbstractView[A] { - def iterator = Iterator.fill(n)(elem) + def iterator: Iterator[A]^{elem} = Iterator.fill(n)(elem) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -114,7 +117,7 @@ object View extends IterableFactory[View] { /** A view containing values of a given function over a range of integer values starting from 0. */ @SerialVersionUID(3L) class Tabulate[A](n: Int)(f: Int => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.tabulate(n)(f) + def iterator: Iterator[A]^{f} = Iterator.tabulate(n)(f) override def knownSize: Int = 0 max n override def isEmpty: Boolean = n <= 0 } @@ -122,7 +125,7 @@ object View extends IterableFactory[View] { /** A view containing repeated applications of a function to a start value */ @SerialVersionUID(3L) class Iterate[A](start: A, len: Int)(f: A => A) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.iterate(start)(f).take(len) + def iterator: Iterator[A]^{f} = Iterator.iterate(start)(f).take(len) override def knownSize: Int = 0 max len override def isEmpty: Boolean = len <= 0 } @@ -132,7 +135,7 @@ object View extends IterableFactory[View] { */ @SerialVersionUID(3L) class Unfold[A, S](initial: S)(f: S => Option[(A, S)]) extends AbstractView[A] { - def iterator: Iterator[A] = Iterator.unfold(initial)(f) + def iterator: Iterator[A]^{f} = Iterator.unfold(initial)(f) } /** An `IterableOps` whose collection type and collection type constructor are unknown */ @@ -140,14 +143,14 @@ object View extends IterableFactory[View] { /** A view that filters an underlying collection. 
*/ @SerialVersionUID(3L) - class Filter[A](val underlying: SomeIterableOps[A], val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.filterImpl(p, isFlipped) + class Filter[A](val underlying: SomeIterableOps[A]^, val p: A => Boolean, val isFlipped: Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.filterImpl(p, isFlipped) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } object Filter { - def apply[A](underlying: Iterable[A], p: A => Boolean, isFlipped: Boolean): Filter[A] = + def apply[A](underlying: Iterable[A]^, p: A => Boolean, isFlipped: Boolean): Filter[A]^{underlying, p} = underlying match { case filter: Filter[A] if filter.isFlipped == isFlipped => new Filter(filter.underlying, a => filter.p(a) && p(a), isFlipped) case _ => new Filter(underlying, p, isFlipped) @@ -156,15 +159,15 @@ object View extends IterableFactory[View] { /** A view that removes the duplicated elements as determined by the transformation function `f` */ @SerialVersionUID(3L) - class DistinctBy[A, B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.distinctBy(f) + class DistinctBy[A, B](underlying: SomeIterableOps[A]^, f: A -> B) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.distinctBy(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A1] { - def iterator = new AbstractIterator[A1] { + class LeftPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A1] { + def iterator: Iterator[A1]^{underlying, f} = new 
AbstractIterator[A1] { private[this] val self = underlying.iterator private[this] var hd: A1 = _ private[this] var hdDefined: Boolean = false @@ -188,8 +191,8 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A], f: A => Either[A1, A2]) extends AbstractView[A2] { - def iterator = new AbstractIterator[A2] { + class RightPartitionMapped[A, A1, A2](underlying: SomeIterableOps[A]^, f: A => Either[A1, A2]) extends AbstractView[A2] { + def iterator: Iterator[A2]^{this} = new AbstractIterator[A2] { private[this] val self = underlying.iterator private[this] var hd: A2 = _ private[this] var hdDefined: Boolean = false @@ -214,8 +217,8 @@ object View extends IterableFactory[View] { /** A view that drops leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Drop[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.drop(n) + class Drop[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.drop(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -226,8 +229,8 @@ object View extends IterableFactory[View] { /** A view that drops trailing elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = dropRightIterator(underlying.iterator, n) + class DropRight[A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = dropRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -239,16 +242,16 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class DropWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator = underlying.iterator.dropWhile(p) + class DropWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.dropWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that takes leading elements of the underlying collection. */ @SerialVersionUID(3L) - class Take[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = underlying.iterator.take(n) + class Take[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.take(n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -259,8 +262,8 @@ object View extends IterableFactory[View] { /** A view that takes trailing elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class TakeRight[+A](underlying: SomeIterableOps[A], n: Int) extends AbstractView[A] { - def iterator = takeRightIterator(underlying.iterator, n) + class TakeRight[+A](underlying: SomeIterableOps[A]^, n: Int) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = takeRightIterator(underlying.iterator, n) protected val normN = n max 0 override def knownSize = { val size = underlying.knownSize @@ -272,15 +275,15 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - class TakeWhile[A](underlying: SomeIterableOps[A], p: A => Boolean) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.takeWhile(p) + class TakeWhile[A](underlying: SomeIterableOps[A]^, p: A => Boolean) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying, p} = underlying.iterator.takeWhile(p) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } @SerialVersionUID(3L) - class ScanLeft[+A, +B](underlying: SomeIterableOps[A], z: B, op: (B, A) => B) extends AbstractView[B] { - def iterator: Iterator[B] = underlying.iterator.scanLeft(z)(op) + class ScanLeft[+A, +B](underlying: SomeIterableOps[A]^, z: B, op: (B, A) => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, op} = underlying.iterator.scanLeft(z)(op) override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -290,32 +293,32 @@ object View extends IterableFactory[View] { /** A view that maps elements of the underlying collection. 
*/ @SerialVersionUID(3L) - class Map[+A, +B](underlying: SomeIterableOps[A], f: A => B) extends AbstractView[B] { - def iterator = underlying.iterator.map(f) + class Map[+A, +B](underlying: SomeIterableOps[A]^, f: A => B) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.map(f) override def knownSize = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } /** A view that flatmaps elements of the underlying collection. */ @SerialVersionUID(3L) - class FlatMap[A, B](underlying: SomeIterableOps[A], f: A => IterableOnce[B]) extends AbstractView[B] { - def iterator = underlying.iterator.flatMap(f) + class FlatMap[A, B](underlying: SomeIterableOps[A]^, f: A => IterableOnce[B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, f} = underlying.iterator.flatMap(f) override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = iterator.isEmpty } /** A view that collects elements of the underlying collection. */ @SerialVersionUID(3L) - class Collect[+A, B](underlying: SomeIterableOps[A], pf: PartialFunction[A, B]) extends AbstractView[B] { - def iterator = underlying.iterator.collect(pf) + class Collect[+A, B](underlying: SomeIterableOps[A]^, pf: PartialFunction[A, B]^) extends AbstractView[B] { + def iterator: Iterator[B]^{underlying, pf} = underlying.iterator.collect(pf) } /** A view that concatenates elements of the prefix collection or iterator with the elements * of the suffix collection or iterator. 
*/ @SerialVersionUID(3L) - class Concat[A](prefix: SomeIterableOps[A], suffix: SomeIterableOps[A]) extends AbstractView[A] { - def iterator = prefix.iterator ++ suffix.iterator + class Concat[A](prefix: SomeIterableOps[A]^, suffix: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{prefix, suffix} = prefix.iterator ++ suffix.iterator override def knownSize = { val prefixSize = prefix.knownSize if (prefixSize >= 0) { @@ -332,8 +335,8 @@ object View extends IterableFactory[View] { * of another collection. */ @SerialVersionUID(3L) - class Zip[A, B](underlying: SomeIterableOps[A], other: Iterable[B]) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zip(other) + class Zip[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zip(other) override def knownSize = { val s1 = underlying.knownSize if (s1 == 0) 0 else { @@ -349,8 +352,8 @@ object View extends IterableFactory[View] { * placeholder elements are used to extend the shorter collection to the length of the longer. 
*/ @SerialVersionUID(3L) - class ZipAll[A, B](underlying: SomeIterableOps[A], other: Iterable[B], thisElem: A, thatElem: B) extends AbstractView[(A, B)] { - def iterator = underlying.iterator.zipAll(other, thisElem, thatElem) + class ZipAll[A, B](underlying: SomeIterableOps[A]^, other: Iterable[B]^, thisElem: A, thatElem: B) extends AbstractView[(A, B)] { + def iterator: Iterator[(A, B)]^{underlying, other} = underlying.iterator.zipAll(other, thisElem, thatElem) override def knownSize = { val s1 = underlying.knownSize if(s1 == -1) -1 else { @@ -363,8 +366,10 @@ object View extends IterableFactory[View] { /** A view that appends an element to its elements */ @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIterableOps[A], elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(underlying, new View.Single(elem)).iterator + class Appended[+A](underlying: SomeIterableOps[A]^, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(underlying, new View.Single(elem)) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -374,8 +379,10 @@ object View extends IterableFactory[View] { /** A view that prepends an element to its elements */ @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIterableOps[A]) extends AbstractView[A] { - def iterator: Iterator[A] = new Concat(new View.Single(elem), underlying).iterator + class Prepended[+A](elem: A, underlying: SomeIterableOps[A]^) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = + val ct = new Concat(new View.Single(elem), underlying) + ct.iterator // CC TODO breakout into `ct` needed, otherwise "cannot establish a reference" error override def knownSize: Int = { val size = underlying.knownSize if (size >= 0) size + 1 else -1 @@ -384,8 +391,8 @@ object View extends 
IterableFactory[View] { } @SerialVersionUID(3L) - class Updated[A](underlying: SomeIterableOps[A], index: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = new AbstractIterator[A] { + class Updated[A](underlying: SomeIterableOps[A]^, index: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = new AbstractIterator[A] { private[this] val it = underlying.iterator private[this] var i = 0 def next(): A = { @@ -403,28 +410,28 @@ object View extends IterableFactory[View] { } @SerialVersionUID(3L) - private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { + private[collection] class Patched[A](underlying: SomeIterableOps[A]^, from: Int, other: IterableOnce[A]^, replaced: Int) extends AbstractView[A] { // we may be unable to traverse `other` more than once, so we need to cache it if that's the case - private val _other: Iterable[A] = other match { + private val _other: Iterable[A]^{other} = other match { case other: Iterable[A] => other case other => LazyList.from(other) } - def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) + def iterator: Iterator[A]^{underlying, other} = underlying.iterator.patch(from, _other.iterator, replaced) override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } @SerialVersionUID(3L) - class ZipWithIndex[A](underlying: SomeIterableOps[A]) extends AbstractView[(A, Int)] { - def iterator: Iterator[(A, Int)] = underlying.iterator.zipWithIndex + class ZipWithIndex[A](underlying: SomeIterableOps[A]^) extends AbstractView[(A, Int)] { + def iterator: Iterator[(A, Int)]^{underlying} = underlying.iterator.zipWithIndex override def knownSize: Int = underlying.knownSize override def isEmpty: Boolean = underlying.isEmpty } @SerialVersionUID(3L) - class 
PadTo[A](underlying: SomeIterableOps[A], len: Int, elem: A) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.padTo(len, elem) + class PadTo[A](underlying: SomeIterableOps[A]^, len: Int, elem: A) extends AbstractView[A] { + def iterator: Iterator[A]^{underlying} = underlying.iterator.padTo(len, elem) override def knownSize: Int = { val size = underlying.knownSize @@ -433,7 +440,7 @@ object View extends IterableFactory[View] { override def isEmpty: Boolean = underlying.isEmpty && len <= 0 } - private[collection] def takeRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + private[collection] def takeRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { val k = it.knownSize if(k == 0 || n <= 0) Iterator.empty else if(n == Int.MaxValue) it @@ -441,22 +448,23 @@ object View extends IterableFactory[View] { else new TakeRightIterator[A](it, n) } - private final class TakeRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private final class TakeRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { + private[this] var current: Iterator[A @uncheckedCaptures]^{underlying} = underlying private[this] var len: Int = -1 private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ def init(): Unit = if(buf eq null) { buf = new ArrayBuffer[AnyRef](maxlen min 256) len = 0 - while(underlying.hasNext) { - val n = underlying.next().asInstanceOf[AnyRef] + while(current.hasNext) { + val n = current.next().asInstanceOf[AnyRef] if(pos >= buf.length) buf.addOne(n) else buf(pos) = n pos += 1 if(pos == maxlen) pos = 0 len += 1 } - underlying = null + current = null if(len > maxlen) len = maxlen pos = pos - len if(pos < 0) pos += maxlen @@ -477,7 +485,7 @@ object View extends IterableFactory[View] { x } } - override def drop(n: Int): Iterator[A] = { + override def drop(n: Int): Iterator[A]^{this} = { init() if (n > 0) { len = (len - n) max 0 @@ 
-487,7 +495,7 @@ object View extends IterableFactory[View] { } } - private[collection] def dropRightIterator[A](it: Iterator[A], n: Int): Iterator[A] = { + private[collection] def dropRightIterator[A](it: Iterator[A]^, n: Int): Iterator[A]^{it} = { if(n <= 0) it else { val k = it.knownSize @@ -496,7 +504,7 @@ object View extends IterableFactory[View] { } } - private final class DropRightIterator[A](private[this] var underlying: Iterator[A], maxlen: Int) extends AbstractIterator[A] { + private final class DropRightIterator[A](underlying: Iterator[A]^, maxlen: Int) extends AbstractIterator[A] { private[this] var len: Int = -1 // known size or -1 if the end of `underlying` has not been seen yet private[this] var pos: Int = 0 private[this] var buf: ArrayBuffer[AnyRef] = _ diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala index 4699abbef5a7..0f3830e9fe25 100644 --- a/tests/pos-special/stdlib/collection/WithFilter.scala +++ b/tests/pos-special/stdlib/collection/WithFilter.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking /** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods * of trait `Iterable`. @@ -22,6 +23,7 @@ package scala.collection */ @SerialVersionUID(3L) abstract class WithFilter[+A, +CC[_]] extends Serializable { + this: WithFilter[A, CC]^ => /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll. @@ -32,7 +34,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * the given function `f` to each element of the filtered outer $coll * and collecting the results. 
*/ - def map[B](f: A => B): CC[B] + def map[B](f: A => B): CC[B]^{this, f} /** Builds a new collection by applying a function to all elements of the * `filtered` outer $coll containing this `WithFilter` instance that satisfy @@ -44,7 +46,7 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * of the filtered outer $coll and * concatenating the results. */ - def flatMap[B](f: A => IterableOnce[B]): CC[B] + def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} /** Applies a function `f` to all elements of the `filtered` outer $coll. * @@ -65,6 +67,6 @@ abstract class WithFilter[+A, +CC[_]] extends Serializable { * All these operations apply to those elements of this $coll which * also satisfy both `p` and `q` predicates. */ - def withFilter(q: A => Boolean): WithFilter[A, CC] + def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q} } diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala index c2b996b93102..d985dad2edc5 100644 --- a/tests/pos-special/stdlib/collection/concurrent/Map.scala +++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala @@ -13,6 +13,7 @@ package scala package collection.concurrent +import language.experimental.captureChecking import scala.annotation.tailrec /** A template trait for mutable maps that allow concurrent access. 
diff --git a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala index e4aa8c8c52a7..0824ecc44519 100644 --- a/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala +++ b/tests/pos-special/stdlib/collection/concurrent/TrieMap.scala @@ -23,8 +23,10 @@ import scala.collection.immutable.{List, Nil} import scala.collection.mutable.GrowableBuilder import scala.util.Try import scala.util.hashing.Hashing +import language.experimental.captureChecking +import caps.unsafe.unsafeAssumePure -private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { +private[collection] final class INode[sealed K, sealed V](bn: MainNode[K, V], g: Gen, equiv: Equiv[K]) extends INodeBase[K, V](g) { import INodeBase._ WRITE(bn) @@ -427,7 +429,7 @@ private[concurrent] object INode { final val KEY_ABSENT = new AnyRef final val KEY_PRESENT_OR_ABSENT = new AnyRef - def newRootNode[K, V](equiv: Equiv[K]) = { + def newRootNode[sealed K, sealed V](equiv: Equiv[K]) = { val gen = new Gen val cn = new CNode[K, V](0, new Array(0), gen) new INode[K, V](cn, gen, equiv) @@ -435,7 +437,7 @@ private[concurrent] object INode { } -private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { +private[concurrent] final class FailedNode[sealed K, sealed V](p: MainNode[K, V]) extends MainNode[K, V] { WRITE_PREV(p) def string(lev: Int) = throw new UnsupportedOperationException @@ -448,12 +450,12 @@ private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends Main } -private[concurrent] trait KVNode[K, V] { +private[concurrent] trait KVNode[sealed K, sealed V] { def kvPair: (K, V) } -private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class SNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) extends BasicNode with KVNode[K, V] { def copy = new 
SNode(k, v, hc) def copyTombed = new TNode(k, v, hc) @@ -463,7 +465,7 @@ private[collection] final class SNode[K, V](final val k: K, final val v: V, fina } // Tomb Node, used to ensure proper ordering during removals -private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class TNode[sealed K, sealed V](final val k: K, final val v: V, final val hc: Int) extends MainNode[K, V] with KVNode[K, V] { def copy = new TNode(k, v, hc) def copyTombed = new TNode(k, v, hc) @@ -475,7 +477,7 @@ private[collection] final class TNode[K, V](final val k: K, final val v: V, fina } // List Node, leaf node that handles hash collisions -private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) +private[collection] final class LNode[sealed K, sealed V](val entries: List[(K, V)], equiv: Equiv[K]) extends MainNode[K, V] { def this(k: K, v: V, equiv: Equiv[K]) = this((k -> v) :: Nil, equiv) @@ -517,7 +519,7 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq } // Ctrie Node, contains bitmap and array of references to branch nodes -private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { +private[collection] final class CNode[sealed K, sealed V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots def cachedSize(ct: AnyRef): Int = { val currsz = READ_SIZE() @@ -653,7 +655,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba private[concurrent] object CNode { - def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { + def dual[sealed K, sealed V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { val xidx = (xhc >>> 
lev) & 0x1f val yidx = (yhc >>> lev) & 0x1f val bmp = (1 << xidx) | (1 << yidx) @@ -688,7 +690,7 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] */ @SerialVersionUID(-5212455458703321708L) -final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) +final class TrieMap[sealed K, sealed V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) extends scala.collection.mutable.AbstractMap[K, V] with scala.collection.concurrent.Map[K, V] with scala.collection.mutable.MapOps[K, V, TrieMap, TrieMap[K, V]] @@ -1017,10 +1019,10 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def view: MapView[K, V] = if (nonReadOnly) readOnlySnapshot().view else super.view @deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0") - override def filterKeys(p: K => Boolean): collection.MapView[K, V] = view.filterKeys(p) + override def filterKeys(p: K => Boolean): collection.MapView[K, V]^{p} = view.filterKeys(p) @deprecated("Use .view.mapValues(f). 
A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0") - override def mapValues[W](f: V => W): collection.MapView[K, W] = view.mapValues(f) + override def mapValues[W](f: V => W): collection.MapView[K, W]^{f} = view.mapValues(f) // END extra overrides /////////////////////////////////////////////////////////////////// @@ -1041,11 +1043,11 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater @SerialVersionUID(3L) object TrieMap extends MapFactory[TrieMap] { - def empty[K, V]: TrieMap[K, V] = new TrieMap[K, V] + def empty[sealed K, sealed V]: TrieMap[K, V] = new TrieMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): TrieMap[K, V] = new TrieMap[K, V]() ++= it + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): TrieMap[K, V] = new TrieMap[K, V]() ++= it - def newBuilder[K, V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K, sealed V]: mutable.GrowableBuilder[(K, V), TrieMap[K, V]] = new GrowableBuilder(empty[K, V]) @transient val inodeupdater: AtomicReferenceFieldUpdater[INodeBase[_, _], MainNode[_, _]] = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") @@ -1069,7 +1071,7 @@ object TrieMap extends MapFactory[TrieMap] { } // non-final as an extension point for parallel collections -private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { +private[collection] class TrieMapIterator[sealed K, sealed V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends AbstractIterator[(K, V)] { private val stack = new Array[Array[BasicNode]](7) private val stackpos = new Array[Int](7) private var depth = -1 @@ -1182,7 +1184,10 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: stack(d) = arr1 stackpos(d) = -1 val it = 
newIterator(level + 1, ct, _mustInit = false) - it.stack(0) = arr2 + val xss: Array[Array[BasicNode]] = it.stack.asInstanceOf + // !!! cc split into separate xss and asInstanceOf needed because cc gets confused with + // two-dimensional invariant arrays + xss(0) = arr2 it.stackpos(0) = -1 it.depth = 0 it.advance() // <-- fix it diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala index 3d155337aa93..bfae792c5107 100644 --- a/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala +++ b/tests/pos-special/stdlib/collection/convert/AsJavaConverters.scala @@ -18,6 +18,7 @@ import java.util.{concurrent => juc} import java.{lang => jl, util => ju} import scala.{unchecked => uc} +import language.experimental.captureChecking /** Defines converter methods from Scala to Java collections. * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. diff --git a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala index 16b15c513a17..14268f7aa165 100644 --- a/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/AsJavaExtensions.scala @@ -16,6 +16,7 @@ package convert import java.util.{concurrent => juc} import java.{lang => jl, util => ju} +import language.experimental.captureChecking /** Defines `asJava` extension methods, available through [[scala.jdk.CollectionConverters]].
*/ trait AsJavaExtensions { diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala index 30a28ae38147..6cc02b13bb06 100644 --- a/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala +++ b/tests/pos-special/stdlib/collection/convert/AsScalaConverters.scala @@ -18,6 +18,7 @@ import java.util.{concurrent => juc} import java.{lang => jl, util => ju} import scala.{unchecked => uc} +import language.experimental.captureChecking /** Defines converter methods from Java to Scala collections. * These methods are available through the [[scala.jdk.javaapi.CollectionConverters]] object. diff --git a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala index 39347dde903b..d60bfc7f60a1 100644 --- a/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/AsScalaExtensions.scala @@ -16,6 +16,7 @@ package convert import java.util.{concurrent => juc} import java.{lang => jl, util => ju} +import language.experimental.captureChecking /** Defines `asScala` extension methods, available through [[scala.jdk.CollectionConverters]]. */ trait AsScalaExtensions { diff --git a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala index 05d63f9fdeee..1bc284462ff1 100644 --- a/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala +++ b/tests/pos-special/stdlib/collection/convert/ImplicitConversions.scala @@ -19,6 +19,7 @@ import java.{lang => jl, util => ju} import scala.collection.JavaConverters._ import scala.language.implicitConversions +import language.experimental.captureChecking /** Defines implicit converter methods from Java to Scala collections. 
*/ @deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0") diff --git a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala index 29c3dcbac5db..e826bdeb23db 100644 --- a/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala +++ b/tests/pos-special/stdlib/collection/convert/JavaCollectionWrappers.scala @@ -22,6 +22,8 @@ import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.chaining._ import scala.util.control.ControlThrowable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** Wrappers for exposing Scala collections as Java collections and vice-versa */ @SerialVersionUID(3L) @@ -127,7 +129,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { def prepend(elem: A) = { underlying.subList(0, 0) add elem; this } def addOne(elem: A): this.type = { underlying add elem; this } def insert(idx: Int,elem: A): Unit = underlying.subList(0, idx).add(elem) - def insertAll(i: Int, elems: IterableOnce[A]) = { + def insertAll(i: Int, elems: IterableOnce[A]^) = { val ins = underlying.subList(0, i) elems.iterator.foreach(ins.add(_)) } @@ -136,7 +138,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { // Note: Clone cannot just call underlying.clone because in Java, only specific collections // expose clone methods. Generically, they're protected. 
override def clone(): JListWrapper[A] = new JListWrapper(new ju.ArrayList[A](underlying)) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { remove(from, replaced) insertAll(from, patch) this @@ -254,7 +256,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { def getKey = k def getValue = v def setValue(v1 : V) = self.put(k, v1) - + // It's important that this implementation conform to the contract // specified in the javadocs of java.util.Map.Entry.hashCode // @@ -358,7 +360,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { val result = underlying.put(k, v) if (present) Some(result) else None } else { - var result: Option[V] = None + var result: Option[V @uncheckedCaptures] = None def recompute(k0: K, v0: V): V = v.tap(_ => if (v0 != null) result = Some(v0) else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) @@ -384,7 +386,7 @@ private[collection] object JavaCollectionWrappers extends Serializable { // support Some(null) if currently bound to null override def remove(k: K): Option[V] = { - var result: Option[V] = None + var result: Option[V @uncheckedCaptures] = None def recompute(k0: K, v0: V): V = { if (v0 != null) result = Some(v0) else if (underlying.containsKey(k0)) result = Some(null.asInstanceOf[V]) diff --git a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala index cdeea62fb5ed..ddda95707881 100644 --- a/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala +++ b/tests/pos-special/stdlib/collection/convert/StreamExtensions.scala @@ -22,6 +22,7 @@ import scala.collection._ import scala.collection.convert.StreamExtensions.{AccumulatorFactoryInfo, StreamShape, StreamUnboxer} import scala.jdk.CollectionConverters._ import scala.jdk._ 
+import language.experimental.captureChecking /** Defines extension methods to create Java Streams for Scala collections, available through * [[scala.jdk.javaapi.StreamConverters]]. diff --git a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala index 845ecb4a606d..ba51c7a5a353 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/ArrayStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] class ObjectArrayStepper[A <: Object](underlying: Array[A], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], ObjectArrayStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala index 7c795aea5391..8b2f604b0977 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/BinaryTreeStepper.scala @@ -18,6 +18,7 @@ import java.util.Spliterator import annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] object BinaryTreeStepper { @@ -125,7 +126,7 @@ extends EfficientSplit { if (!hasStep || index < 0) null else { val root = stack(0).asInstanceOf[T] - val leftStack = + val leftStack = if (index > 0) java.util.Arrays.copyOfRange(stack, 1, index+1) else BinaryTreeStepper.emptyStack val leftIndex = index - 1 diff --git a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala index 574e7fd50f1c..16801089c39f 100644 --- 
a/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/BitSetStepper.scala @@ -15,11 +15,12 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection.{BitSetOps, IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable private[collection] final class BitSetStepper( - private var underlying: BitSetOps[_], - private var cache0: Long, private var cache1: Long, + private var underlying: BitSetOps[_], + private var cache0: Long, private var cache1: Long, _i0: Int, _iN: Int, private var cacheIndex: Int ) @@ -47,7 +48,7 @@ with IntStepper { findNext() } } - else if (underlying eq null) { + else if (underlying eq null) { i0 = iN found = false found @@ -96,7 +97,7 @@ with IntStepper { else scanLong(bits, from + 1) def nextStep(): Int = - if (found || findNext()) { + if (found || findNext()) { found = false val ans = i0 i0 += 1 diff --git a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala index 466e6c440f45..12fb471ea768 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/ChampStepper.scala @@ -16,6 +16,7 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection._ import scala.collection.immutable.Node +// import language.experimental.captureChecking // TODO enable /** A stepper that is a slightly elaborated version of the ChampBaseIterator; * the main difference is that it knows when it should stop instead of running diff --git a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala index 2d1f88d02930..7140c7d673d0 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala +++ 
b/tests/pos-special/stdlib/collection/convert/impl/InOrderStepperBase.scala @@ -16,6 +16,7 @@ package impl import java.util.Spliterator import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable /** Abstracts all the generic operations of stepping over a collection * that has an indexable ordering but may have gaps. diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala index 136ac8d2dcc3..1e2983fde50d 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedSeqStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] class AnyIndexedSeqStepper[A](underlying: collection.IndexedSeqOps[A, AnyConstr, _], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], AnyIndexedSeqStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala index 4670ccc56bfc..cae3809ab077 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IndexedStepperBase.scala @@ -16,6 +16,7 @@ package impl import java.util.Spliterator import scala.collection.Stepper.EfficientSplit +// import language.experimental.captureChecking // TODO enable /** Abstracts all the generic operations of stepping over an indexable collection */ private[convert] abstract class IndexedStepperBase[Sub >: Null, Semi <: Sub](protected var i0: Int, protected var iN: Int) diff --git a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala index 
68b318c04c9c..393e988959eb 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/IteratorStepper.scala @@ -17,6 +17,7 @@ import java.util.Spliterator import scala.collection.{AnyStepper, DoubleStepper, IntStepper, LongStepper, Stepper} import scala.jdk.{AnyAccumulator, DoubleAccumulator, IntAccumulator, LongAccumulator} +// import language.experimental.captureChecking // TODO enable private[collection] class AnyIteratorStepper[A](_underlying: Iterator[A]) extends IteratorStepperBase[A, AnyStepper[A], AnyIteratorStepper[A]](_underlying) diff --git a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala index 89e17bbf467c..7c122f901839 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/NumericRangeStepper.scala @@ -15,6 +15,7 @@ package impl import scala.collection.{AnyStepper, IntStepper, LongStepper, Stepper} import scala.collection.immutable.NumericRange +// import language.experimental.captureChecking // TODO enable private[collection] class AnyNumericRangeStepper[A](underlying: NumericRange[A], _i0: Int, _iN: Int) extends IndexedStepperBase[AnyStepper[A], AnyNumericRangeStepper[A]](_i0, _iN) diff --git a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala index 282ddb4aa2ad..50ab623a014e 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/RangeStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable /** Implements Stepper on an integer Range. 
You don't actually need the Range to do this, * so only the relevant parts are included. Because the arguments are protected, they are @@ -27,7 +28,7 @@ with IntStepper { val ans = myNext myNext += myStep i0 += 1 - ans + ans } else Stepper.throwNSEE() protected def semiclone(half: Int): RangeStepper = new RangeStepper(myNext, myStep, i0, half) diff --git a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala index 8990f462b4fd..fe127b857c45 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/StringStepper.scala @@ -18,6 +18,7 @@ import java.util.Spliterator import scala.collection.Stepper.EfficientSplit import scala.collection.{IntStepper, Stepper} +// import language.experimental.captureChecking // TODO enable /** Implements `Stepper` on a `String` where you step through chars packed into `Int`. */ diff --git a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala index cac041a5237b..6329d83bc2a0 100644 --- a/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/TableStepper.scala @@ -15,6 +15,7 @@ package impl import scala.collection.Stepper.EfficientSplit import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[collection] abstract class TableStepperBase[A, I >: Null <: AnyRef, Sub >: Null, Semi <: Sub with TableStepperBase[A, I, _, _]]( protected var maxLength: Int, protected val table: Array[I], protected var i0: Int, protected val iN: Int diff --git a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala index 332ec65d85fd..504e0dac63ea 100644 --- 
a/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala +++ b/tests/pos-special/stdlib/collection/convert/impl/VectorStepper.scala @@ -14,6 +14,7 @@ package scala.collection.convert package impl import scala.collection._ +// import language.experimental.captureChecking // TODO enable private[convert] abstract class VectorStepperBase[Sub >: Null, Semi <: Sub]( _i0: Int, @@ -91,7 +92,7 @@ with DoubleStepper { index1 = 32 i0 = half ans - } + } } private[collection] class IntVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) @@ -109,7 +110,7 @@ with IntStepper { index1 = 32 i0 = half ans - } + } } private[collection] class LongVectorStepper(_i0: Int, _iN: Int, _displayN: Int, _trunk: Array[AnyRef]) @@ -127,5 +128,5 @@ with LongStepper { index1 = 32 i0 = half ans - } + } } diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala index 4c64dec9dc1f..f76619a004fa 100644 --- a/tests/pos-special/stdlib/collection/generic/BitOperations.scala +++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala @@ -12,6 +12,7 @@ package scala.collection package generic +import language.experimental.captureChecking /** Some bit operations. diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala index 69b4b3d96e61..7eba9433b8d5 100644 --- a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala +++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala @@ -16,6 +16,8 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import scala.collection.{Factory, Iterable} import scala.collection.mutable.Builder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** The default serialization proxy for collection implementations. 
* @@ -27,7 +29,8 @@ import scala.collection.mutable.Builder @SerialVersionUID(3L) final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { - @transient protected var builder: Builder[A, Any] = _ + @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _ + // @uncheckedCaptures OK since builder is used only locally when reading objects private[this] def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala index bf2eab6bb2a6..c309299b615b 100644 --- a/tests/pos-special/stdlib/collection/generic/IsIterable.scala +++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala @@ -12,6 +12,7 @@ package scala.collection package generic +import language.experimental.captureChecking /** A trait which can be used to avoid code duplication when defining extension * methods that should be applicable both to existing Scala collections (i.e., diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala index 7d7293037bd4..2836ca2bb520 100644 --- a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala +++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala @@ -13,6 +13,7 @@ package scala package collection package generic +import language.experimental.captureChecking /** Type class witnessing that a collection representation type `Repr` has * elements of type `A` and has a conversion to `IterableOnce[A]`. 
diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala index 19f75cf7bced..ad7254d2dd61 100644 --- a/tests/pos-special/stdlib/collection/generic/IsMap.scala +++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala @@ -15,6 +15,7 @@ package generic import IsMap.Tupled import scala.collection.immutable.{IntMap, LongMap} +import language.experimental.captureChecking /** * Type class witnessing that a collection type `Repr` diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala index 69ea27d087d1..8ad344c4d4fc 100644 --- a/tests/pos-special/stdlib/collection/generic/IsSeq.scala +++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala @@ -14,6 +14,9 @@ package scala.collection package generic import scala.reflect.ClassTag +import language.experimental.captureChecking +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Type class witnessing that a collection representation type `Repr` has * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for @@ -51,11 +54,24 @@ object IsSeq { implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } = seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }] - implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsSeq[CC0[A0]] { type A = A0; type C = View[A0] } = - new IsSeq[CC0[A0]] { + /** !!! Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to seqViewIsIterable + */ + implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } = + new IsIterable[CC0[A0]] { type A = A0 type C = View[A] - def apply(coll: CC0[A0]): SeqOps[A0, View, View[A0]] = coll + def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll + } + + /** !!! 
Under cc, views are not Seqs and can't use SeqOps. + * So this should be renamed to stringViewIsIterable + */ + implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } = + new IsIterable[StringView] { + type A = Char + type C = View[Char] + def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll } implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } = @@ -68,22 +84,15 @@ object IsSeq { def apply(i: Int): Char = s.charAt(i) def toIterable: Iterable[Char] = new immutable.WrappedString(s) protected[this] def coll: String = s - protected[this] def fromSpecific(coll: IterableOnce[Char]): String = coll.iterator.mkString - def iterableFactory: IterableFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged + protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString + def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged override def empty: String = "" protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder def iterator: Iterator[Char] = s.iterator } } - implicit val stringViewIsSeq: IsSeq[StringView] { type A = Char; type C = View[Char] } = - new IsSeq[StringView] { - type A = Char - type C = View[Char] - def apply(coll: StringView): SeqOps[Char, View, View[Char]] = coll - } - - implicit def arrayIsSeq[A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = + implicit def arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } = new IsSeq[Array[A0]] { type A = A0 type C = Array[A0] @@ -91,10 +100,10 @@ object IsSeq { new SeqOps[A, mutable.ArraySeq, Array[A]] { def apply(i: Int): A = a(i) def length: Int = a.length - def toIterable: Iterable[A] = mutable.ArraySeq.make(a) + def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a) protected def coll: Array[A] = a - protected def fromSpecific(coll: IterableOnce[A]): Array[A] = 
Array.from(coll) - def iterableFactory: IterableFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged + protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll) + def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged override def empty: Array[A] = Array.empty[A] protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder def iterator: Iterator[A] = a.iterator diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala index 223997f4e972..2c0967dbaf4b 100644 --- a/tests/pos-special/stdlib/collection/generic/Subtractable.scala +++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala @@ -13,6 +13,7 @@ package scala package collection package generic +import language.experimental.captureChecking /** This trait represents collection-like objects that can be reduced * using a '+' operator. It defines variants of `-` and `--` diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala index 0c16aa04dc98..0ba67c1bf76e 100644 --- a/tests/pos-special/stdlib/collection/generic/package.scala +++ b/tests/pos-special/stdlib/collection/generic/package.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking package object generic { diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala index 978c63034f4a..3a221fc76b6c 100644 --- a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala @@ -23,6 +23,8 @@ import scala.reflect.ClassTag import scala.runtime.ScalaRunTime import scala.util.Sorting import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * An immutable array. 
@@ -38,7 +40,8 @@ sealed abstract class ArraySeq[+A] with IndexedSeqOps[A, ArraySeq, ArraySeq[A]] with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]] with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag] - with Serializable { + with Serializable + with Pure { /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype @@ -53,8 +56,10 @@ sealed abstract class ArraySeq[+A] * array of a supertype or subtype of the element type. */ def unsafeArray: Array[_] + def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]] + protected def evidenceIterableFactory: ArraySeq.type = ArraySeq - protected def iterableEvidence: ClassTag[A @uncheckedVariance] = elemTag.asInstanceOf[ClassTag[A]] + protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]] def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit @@ -79,10 +84,10 @@ sealed abstract class ArraySeq[+A] } override def prepended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]] + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]] override def appended[B >: A](elem: B): ArraySeq[B] = - ArraySeq.unsafeWrapArray(unsafeArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] + ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]] /** Fast concatenation of two [[ArraySeq]]s. 
* @@ -104,8 +109,8 @@ sealed abstract class ArraySeq[+A] null else if (thisIsObj) { // A and B are objects - val ax = this.unsafeArray.asInstanceOf[Array[A]] - val ay = that.unsafeArray.asInstanceOf[Array[B]] + val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]] val len = ax.length + ay.length val a = new Array[AnyRef](len) System.arraycopy(ax, 0, a, 0, ax.length) @@ -113,8 +118,8 @@ sealed abstract class ArraySeq[+A] ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] } else { // A is a primative and B = A. Use this instance's protected ClassTag. - val ax = this.unsafeArray.asInstanceOf[Array[A]] - val ay = that.unsafeArray.asInstanceOf[Array[A]] + val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] + val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]] val len = ax.length + ay.length val a = iterableEvidence.newArray(len) System.arraycopy(ax, 0, a, 0, ax.length) @@ -124,7 +129,7 @@ sealed abstract class ArraySeq[+A] } } - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): ArraySeq[B] = { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = { def genericResult = { val k = suffix.knownSize if (k == 0) this @@ -147,7 +152,7 @@ sealed abstract class ArraySeq[+A] } } - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): ArraySeq[B] = { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = { def genericResult = { val k = prefix.knownSize if (k == 0) this @@ -171,7 +176,7 @@ sealed abstract class ArraySeq[+A] } } - override def zip[B](that: collection.IterableOnce[B]): ArraySeq[(A, B)] = + override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] = that match { case bs: ArraySeq[B] => ArraySeq.tabulate(length min bs.length) { i => @@ -181,35 +186,37 @@ sealed abstract class ArraySeq[+A] strictOptimizedZip[B, ArraySeq[(A, B)]](that, 
iterableFactory.newBuilder) } + private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs) + override def take(n: Int): ArraySeq[A] = if (unsafeArray.length <= n) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).take(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]] override def takeRight(n: Int): ArraySeq[A] = if (unsafeArray.length <= n) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).takeRight(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]] override def drop(n: Int): ArraySeq[A] = if (n <= 0) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).drop(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]] override def dropRight(n: Int): ArraySeq[A] = if (n <= 0) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).dropRight(n)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]] override def slice(from: Int, until: Int): ArraySeq[A] = if (from <= 0 && unsafeArray.length <= until) this else - ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).slice(from, until)).asInstanceOf[ArraySeq[A]] + ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]] override def foldLeft[B](z: B)(f: (B, A) => B): B = { // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast @@ -239,13 +246,13 @@ sealed abstract class ArraySeq[+A] b } - override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(new ArrayOps(unsafeArray).tail).asInstanceOf[ArraySeq[A]] + override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]] - override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(new 
ArrayOps(unsafeArray).reverse).asInstanceOf[ArraySeq[A]] + override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]] override protected[this] def className = "ArraySeq" - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(unsafeArray, 0, xs, start, copied) @@ -277,18 +284,18 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => def empty[A : ClassTag]: ArraySeq[A] = emptyImpl - def from[A](it: scala.collection.IterableOnce[A])(implicit tag: ClassTag[A]): ArraySeq[A] = it match { + def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match { case as: ArraySeq[A] => as case _ => unsafeWrapArray(Array.from[A](it)) } def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = - ArrayBuffer.newBuilder[A].mapResult(b => unsafeWrapArray[A](b.toArray)) + ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray)) override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem) override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = { - val elements = Array.ofDim[A](scala.math.max(n, 0)) + val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0)) var i = 0 while (i < n) { ScalaRunTime.array_update(elements, i, f(i)) @@ -309,7 +316,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a * `ClassCastException` at runtime. 
*/ - def unsafeWrapArray[T](x: Array[T]): ArraySeq[T] = ((x: @unchecked) match { + def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala index 9461264850a9..9c2bfdad54d0 100644 --- a/tests/pos-special/stdlib/collection/immutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala @@ -17,6 +17,7 @@ package immutable import BitSetOps.{LogWL, updateArray} import mutable.Builder import scala.annotation.{implicitNotFound, nowarn} +import language.experimental.captureChecking /** A class for immutable bitsets. * $bitsetinfo @@ -37,7 +38,7 @@ sealed abstract class BitSet override def unsorted: Set[Int] = this - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @@ -94,7 +95,7 @@ sealed abstract class BitSet @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = it match { case bs: BitSet => bs case _ => (newBuilder ++= it).result() diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala index 711332567b0f..fc9bcb022874 100644 --- a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala +++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala @@ -16,6 +16,7 @@ package scala.collection.immutable import 
java.lang.Integer.bitCount import java.lang.Math.ceil import java.lang.System.arraycopy +import language.experimental.captureChecking private[collection] object Node { final val HashCodeLength = 32 @@ -112,7 +113,7 @@ private[immutable] abstract class ChampBaseIterator[T <: Node[T]] { // ChampBaseReverseIterator and in convert.impl.ChampStepperBase. // If you change this code, check those also in case they also // need to be modified. - + protected var currentValueCursor: Int = 0 protected var currentValueLength: Int = 0 protected var currentValueNode: T = _ diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala index 2e8378c4d810..c364924db3a3 100644 --- a/tests/pos-special/stdlib/collection/immutable/HashMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala @@ -25,6 +25,8 @@ import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, Step import scala.runtime.AbstractFunction2 import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree. * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. 
@@ -161,7 +163,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0)) } - override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]): HashMap[K, V1] = that match { + override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match { case hm: HashMap[K, V1] => if (isEmpty) hm else { @@ -384,7 +386,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: else new HashMap(newRootNode) } - override def removedAll(keys: IterableOnce[K]): HashMap[K, V] = { + override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = { if (isEmpty) { this } else { @@ -1766,7 +1768,7 @@ private final class BitmapIndexedMapNode[K, +V]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures] } newNodes += newSubNode } @@ -1851,7 +1853,7 @@ private final class BitmapIndexedMapNode[K, +V]( private final class HashCollisionMapNode[K, +V ]( val originalHash: Int, val hash: Int, - var content: Vector[(K, V @uV)] + var content: Vector[(K, V @uV) @uncheckedCaptures] ) extends MapNode[K, V] { import Node._ @@ -2155,7 +2157,7 @@ private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V]) extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] { private[this] var hash = 0 - private[this] var value: V = _ + private[this] var value: V @uncheckedCaptures = _ override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed) def next() = { if (!hasNext) @@ -2202,7 +2204,7 @@ object HashMap extends MapFactory[HashMap] { def empty[K, V]: HashMap[K, V] = EmptyMap.asInstanceOf[HashMap[K, V]] - def from[K, V](source: collection.IterableOnce[(K, V)]): HashMap[K, V] = + def from[K, 
V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] = source match { case hs: HashMap[K, V] => hs case _ => (newBuilder[K, V] ++= source).result() @@ -2227,12 +2229,12 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, /** The last given out HashMap as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. */ - private var aliased: HashMap[K, V] = _ + private var aliased: HashMap[K, V] @uncheckedCaptures = _ private def isAliased: Boolean = aliased != null /** The root node of the partially build hashmap */ - private var rootNode: BitmapIndexedMapNode[K, V] = newEmptyRootNode + private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = if (rootNode.size == 0) value @@ -2366,7 +2368,7 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { ensureUnaliased() xs match { case hm: HashMap[K, V] => @@ -2383,7 +2385,7 @@ private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, ) currentValueCursor += 1 } - } + }.asInstanceOf // !!! 
cc gets confused with representation of capture sets in invariant position case hm: collection.mutable.HashMap[K, V] => val iter = hm.nodeIterator while (iter.hasNext) { diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala index 459fcf1682aa..38f394a7005f 100644 --- a/tests/pos-special/stdlib/collection/immutable/HashSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala @@ -23,6 +23,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.ReusableBuilder import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree. * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details. @@ -1152,7 +1154,7 @@ private final class BitmapIndexedSetNode[A]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } newNodes += newSubNode } @@ -1160,7 +1162,7 @@ private final class BitmapIndexedSetNode[A]( newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty + nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } nodesToMigrateToData += newSubNode } @@ -1267,7 +1269,7 @@ private final class BitmapIndexedSetNode[A]( } else { mapOfNewNodes |= bitpos if (newNodes eq null) { - newNodes = mutable.Queue.empty + newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } newNodes += newSubNode } @@ -1275,7 +1277,7 @@ private final class BitmapIndexedSetNode[A]( newDataMap |= bitpos nodeMigrateToDataTargetMap |= bitpos if (nodesToMigrateToData eq null) { - nodesToMigrateToData = mutable.Queue.empty + nodesToMigrateToData 
= mutable.Queue.empty[SetNode[A] @uncheckedCaptures] } nodesToMigrateToData += newSubNode } @@ -1740,7 +1742,7 @@ private final class BitmapIndexedSetNode[A]( } } -private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A]) extends SetNode[A] { +private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] { import Node._ @@ -1944,7 +1946,7 @@ object HashSet extends IterableFactory[HashSet] { def empty[A]: HashSet[A] = EmptySet.asInstanceOf[HashSet[A]] - def from[A](source: collection.IterableOnce[A]): HashSet[A] = + def from[A](source: collection.IterableOnce[A]^): HashSet[A] = source match { case hs: HashSet[A] => hs case _ if source.knownSize == 0 => empty[A] @@ -1969,12 +1971,12 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has /** The last given out HashSet as a return value of `result()`, if any, otherwise null. * Indicates that on next add, the elements should be copied to an identical structure, before continuing * mutations. 
*/ - private var aliased: HashSet[A] = _ + private var aliased: HashSet[A] @uncheckedCaptures = _ private def isAliased: Boolean = aliased != null /** The root node of the partially build hashmap */ - private var rootNode: BitmapIndexedSetNode[A] = newEmptyRootNode + private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */ private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = { @@ -2084,7 +2086,7 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has this } - override def addAll(xs: IterableOnce[A]) = { + override def addAll(xs: IterableOnce[A]^) = { ensureUnaliased() xs match { case hm: HashSet[A] => @@ -2100,7 +2102,7 @@ private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, Has ) currentValueCursor += 1 } - } + }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position case other => val it = other.iterator while(it.hasNext) addOne(it.next()) diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala index 240821b11460..d7077845b845 100644 --- a/tests/pos-special/stdlib/collection/immutable/IntMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala @@ -18,6 +18,8 @@ import scala.collection.mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.language.implicitConversions +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Utility class for integer maps. 
*/ @@ -52,7 +54,7 @@ object IntMap { def apply[T](elems: (Int, T)*): IntMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - def from[V](coll: IterableOnce[(Int, V)]): IntMap[V] = + def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] = newBuilder[V].addAll(coll).result() private[immutable] case object Nil extends IntMap[Nothing] { @@ -89,13 +91,13 @@ object IntMap { @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Int, AnyRef)]): IntMap[AnyRef] = IntMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it) def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]) = IntMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it) def newBuilder(from: Any) = IntMap.newBuilder[AnyRef] } @@ -180,9 +182,9 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] with StrictOptimizedMapOps[Int, T, Map, IntMap[T]] with Serializable { - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]): IntMap[T] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] = intMapFrom[T](coll) - protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]): IntMap[V2] = { + protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = { val b = IntMap.newBuilder[V2] b.sizeHint(coll) b.addAll(coll) @@ -196,7 +198,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] override def 
empty: IntMap[T] = IntMap.Nil override def toList = { - val buffer = new scala.collection.mutable.ListBuffer[(Int, T)] + val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures] foreach(buffer += _) buffer.toList } @@ -327,10 +329,10 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) - override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = + override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such - override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = concat(that) + override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that) def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] = strictOptimizedCollect(IntMap.newBuilder[V2], pf) diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala index d4199ab3ab14..c4f9900eea8b 100644 --- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala @@ -13,6 +13,7 @@ package scala.collection.immutable import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking /** A trait for collections that are guaranteed immutable. 
* @@ -24,13 +25,14 @@ import scala.collection.{IterableFactory, IterableFactoryDefaults} trait Iterable[+A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @SerialVersionUID(3L) object Iterable extends IterableFactory.Delegate[Iterable](List) { - override def from[E](it: IterableOnce[E]): Iterable[E] = it match { + override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match { case iterable: Iterable[E] => iterable case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/immutable/LazyList.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala similarity index 66% rename from tests/pos-special/stdlib/collection/immutable/LazyList.scala rename to tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala index 8b7ad26dc5ae..5684130b6048 100644 --- a/tests/pos-special/stdlib/collection/immutable/LazyList.scala +++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala @@ -22,21 +22,29 @@ import scala.collection.generic.SerializeEnd import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder} import scala.language.implicitConversions import scala.runtime.Statics +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** This class implements an immutable linked list. We call it "lazy" * because it computes its elements only when they are needed. * + * The class extends Iterable; it is a replacement for LazyList, which + * which implemented Seq. The reason is that under capture checking, we + * assume that all Seqs are strict, and LazyList broke that assumption. + * As a consequence, we declare LazyList is deprecated and unsafe for + * capture checking, and replace it by the current class, LazyListIterable. 
+ * * Elements are memoized; that is, the value of each element is computed at most once. * * Elements are computed in-order and are never skipped. In other words, * accessing the tail causes the head to be computed first. * - * How lazy is a `LazyList`? When you have a value of type `LazyList`, you + * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you * don't know yet whether the list is empty or not. If you learn that it is non-empty, * then you also know that the head has been computed. But the tail is itself - * a `LazyList`, whose emptiness-or-not might remain undetermined. + * a `LazyListIterable`, whose emptiness-or-not might remain undetermined. * - * A `LazyList` may be infinite. For example, `LazyList.from(0)` contains + * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains * all of the natural numbers 0, 1, 2, and so on. For infinite sequences, * some methods (such as `count`, `sum`, `max` or `min`) will not terminate. * @@ -45,7 +53,7 @@ import scala.runtime.Statics * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 } * fibs.take(5).foreach(println) * } @@ -65,7 +73,7 @@ import scala.runtime.Statics * {{{ * import scala.math.BigInt * object Main extends App { - * val fibs: LazyList[BigInt] = + * val fibs: LazyListIterable[BigInt] = * BigInt(0) #:: BigInt(1) #:: * fibs.zip(fibs.tail).map{ n => * println(s"Adding \${n._1} and \${n._2}") @@ -98,22 +106,22 @@ import scala.runtime.Statics * }}} * * Note that the definition of `fibs` uses `val` not `def`. The memoization of the - * `LazyList` requires us to have somewhere to store the information and a `val` + * `LazyListIterable` requires us to have somewhere to store the information and a `val` * allows us to do that. 
* - * Further remarks about the semantics of `LazyList`: + * Further remarks about the semantics of `LazyListIterable`: * - * - Though the `LazyList` changes as it is accessed, this does not + * - Though the `LazyListIterable` changes as it is accessed, this does not * contradict its immutability. Once the values are memoized they do * not change. Values that have yet to be memoized still "exist", they * simply haven't been computed yet. * * - One must be cautious of memoization; it can eat up memory if you're not - * careful. That's because memoization of the `LazyList` creates a structure much like + * careful. That's because memoization of the `LazyListIterable` creates a structure much like * [[scala.collection.immutable.List]]. As long as something is holding on to * the head, the head holds on to the tail, and so on recursively. * If, on the other hand, there is nothing holding on to the head (e.g. if we used - * `def` to define the `LazyList`) then once it is no longer being used directly, + * `def` to define the `LazyListIterable`) then once it is no longer being used directly, * it disappears. 
* * - Note that some operations, including [[drop]], [[dropWhile]], @@ -133,30 +141,30 @@ import scala.runtime.Statics * } * } * - * // Our first LazyList definition will be a val definition - * val lazylist1: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * // Our first LazyListIterable definition will be a val definition + * val lazylist1: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * * // Because lazylist1 is a val, everything that the iterator produces is held - * // by virtue of the fact that the head of the LazyList is held in lazylist1 + * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1 * val it1 = lazylist1.iterator * loop("Iterator1: ", it1.next(), it1) * - * // We can redefine this LazyList such that all we have is the Iterator left - * // and allow the LazyList to be garbage collected as required. Using a def - * // to provide the LazyList ensures that no val is holding onto the head as + * // We can redefine this LazyListIterable such that all we have is the Iterator left + * // and allow the LazyListIterable to be garbage collected as required. Using a def + * // to provide the LazyListIterable ensures that no val is holding onto the head as * // is the case with lazylist1 - * def lazylist2: LazyList[Int] = { - * def loop(v: Int): LazyList[Int] = v #:: loop(v + 1) + * def lazylist2: LazyListIterable[Int] = { + * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1) * loop(0) * } * val it2 = lazylist2.iterator * loop("Iterator2: ", it2.next(), it2) * - * // And, of course, we don't actually need a LazyList at all for such a simple - * // problem. There's no reason to use a LazyList if you don't actually need + * // And, of course, we don't actually need a LazyListIterable at all for such a simple + * // problem. There's no reason to use a LazyListIterable if you don't actually need * // one. 
* val it3 = new Iterator[Int] { * var i = -1 @@ -167,7 +175,7 @@ import scala.runtime.Statics * }}} * * - In the `fibs` example earlier, the fact that `tail` works at all is of interest. - * `fibs` has an initial `(0, 1, LazyList(...))`, so `tail` is deterministic. + * `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic. * If we defined `fibs` such that only `0` were concretely known, then the act * of determining `tail` would require the evaluation of `tail`, so the * computation would be unable to progress, as in this code: @@ -175,7 +183,7 @@ import scala.runtime.Statics * // The first time we try to access the tail we're going to need more * // information which will require us to recurse, which will require us to * // recurse, which... - * lazy val sov: LazyList[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } + * lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 } * }}} * * The definition of `fibs` above creates a larger number of objects than @@ -184,8 +192,8 @@ import scala.runtime.Statics * fact that it has a more direct route to the numbers themselves: * * {{{ - * lazy val fib: LazyList[Int] = { - * def loop(h: Int, n: Int): LazyList[Int] = h #:: loop(n, h + n) + * lazy val fib: LazyListIterable[Int] = { + * def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n) * loop(1, 1) * } * }}} @@ -196,8 +204,8 @@ import scala.runtime.Statics * the tails content is deferred until the tails empty status, head or tail is * evaluated. * - * Delaying the evaluation of whether a LazyList is empty or not until it's needed - * allows LazyList to not eagerly evaluate any elements on a call to `filter`. + * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed + * allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`. * * Only when it's further evaluated (which may be never!) 
any of the elements gets * forced. @@ -205,24 +213,24 @@ import scala.runtime.Statics * for example: * * {{{ - * def tailWithSideEffect: LazyList[Nothing] = { - * println("getting empty LazyList") - * LazyList.empty + * def tailWithSideEffect: LazyListIterable[Nothing] = { + * println("getting empty LazyListIterable") + * LazyListIterable.empty * } * - * val emptyTail = tailWithSideEffect // prints "getting empty LazyList" + * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable" * * val suspended = 1 #:: tailWithSideEffect // doesn't print anything * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed * val filtered = tail.filter(_ => false) // still nothing is printed - * filtered.isEmpty // prints "getting empty LazyList" + * filtered.isEmpty // prints "getting empty LazyListIterable" * }}} * * @tparam A the type of the elements contained in this lazy list. * * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]] * section on `LazyLists` for more information. - * @define Coll `LazyList` + * @define Coll `LazyListIterable` * @define coll lazy list * @define orderDependent * @define orderDependentFold @@ -237,23 +245,24 @@ import scala.runtime.Statics * @define evaluatesAllElements This method evaluates all elements of the collection. 
*/ @SerialVersionUID(3L) -final class LazyList[+A] private(private[this] var lazyState: () => LazyList.State[A]) - extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, LazyList, LazyList[A]] - with IterableFactoryDefaults[A, LazyList] +final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, LazyListIterable, LazyListIterable[A]] + with IterableFactoryDefaults[A, LazyListIterable] with Serializable { - import LazyList._ + this: LazyListIterable[A]^ => + import LazyListIterable._ @volatile private[this] var stateEvaluated: Boolean = false @inline private def stateDefined: Boolean = stateEvaluated private[this] var midEvaluation = false - private lazy val state: State[A] = { + private lazy val state: State[A]^ = { // if it's already mid-evaluation, we're stuck in an infinite // self-referential loop (also it's empty) if (midEvaluation) { - throw new RuntimeException("self-referential LazyList or a derivation thereof has no more elements") + throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements") } midEvaluation = true val res = try lazyState() finally midEvaluation = false @@ -264,7 +273,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta res } - override def iterableFactory: SeqFactory[LazyList] = LazyList + override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable override def isEmpty: Boolean = state eq State.Empty @@ -276,7 +285,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def head: A = state.head - override def tail: LazyList[A] = state.tail + override def tail: LazyListIterable[A]^{this} = state.tail @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline) @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline) @@ 
-287,13 +296,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * elements of the cycle are evaluated. For example: * * {{{ - * val ring: LazyList[Int] = 1 #:: 2 #:: 3 #:: ring + * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring * ring.force * ring.toString * * // prints * // - * // LazyList(1, 2, 3, ...) + * // LazyListIterable(1, 2, 3, ...) * }}} * * This method will *not* terminate for non-cyclic infinite-sized collections. @@ -302,7 +311,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ def force: this.type = { // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: LazyList[A] = this + var these, those: LazyListIterable[A]^{this} = this if (!these.isEmpty) { these = these.tail } @@ -322,7 +331,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method preserves laziness; elements are * only evaluated individually as needed. */ - override def iterator: Iterator[A] = + override def iterator: Iterator[A]^{this} = if (knownIsEmpty) Iterator.empty else new LazyIterator(this) @@ -332,9 +341,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param f The treatment to apply to each element. * @note Overridden here as final to trigger tail-call optimization, which * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying LazyList as elements + * necessary for allowing the GC to collect the underlying LazyListIterable as elements * are consumed. - * @note This function will force the realization of the entire LazyList + * @note This function will force the realization of the entire LazyListIterable * unless the `f` throws an exception. 
*/ @tailrec @@ -345,12 +354,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - /** LazyList specialization of foldLeft which allows GC to collect along the + /** LazyListIterable specialization of foldLeft which allows GC to collect along the * way. * * @tparam B The type of value being accumulated. * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `LazyList`. + * @param op The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `op`. */ @tailrec @@ -359,10 +368,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta else tail.foldLeft(op(z, head))(op) // State.Empty doesn't use the SerializationProxy - protected[this] def writeReplace(): AnyRef = - if (knownNonEmpty) new LazyList.SerializationProxy[A](this) else this + protected[this] def writeReplace(): AnyRef^{this} = + if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this - override protected[this] def className = "LazyList" + override protected[this] def className = "LazyListIterable" /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list. * @@ -373,10 +382,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * @param suffix The collection that gets appended to this lazy list * @return The lazy list containing elements of this lazy list and the iterable object. 
*/ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): LazyList[B] = + def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = newLL { if (isEmpty) suffix match { - case lazyList: LazyList[B] => lazyList.state // don't recompute the LazyList + case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable case coll if coll.knownSize == 0 => State.Empty case coll => stateFromIterator(coll.iterator) } @@ -389,8 +398,8 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appendedAll[B >: A](suffix: IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(suffix) + def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} = + if (knownIsEmpty) LazyListIterable.from(suffix) else lazyAppendedAll(suffix) /** @inheritdoc @@ -399,19 +408,19 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $appendStackSafety */ - override def appended[B >: A](elem: B): LazyList[B] = - if (knownIsEmpty) newLL(sCons(elem, LazyList.empty)) + def appended[B >: A](elem: B): LazyListIterable[B]^{this} = + if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty)) else lazyAppendedAll(Iterator.single(elem)) /** @inheritdoc * * $preservesLaziness */ - override def scanLeft[B](z: B)(op: (B, A) => B): LazyList[B] = - if (knownIsEmpty) newLL(sCons(z, LazyList.empty)) + override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} = + if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty)) else newLL(scanLeftState(z)(op)) - private def scanLeftState[B](z: B)(op: (B, A) => B): State[B] = + private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} = sCons( z, newLL { @@ -420,18 +429,18 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } ) - /** LazyList specialization of reduceLeft which allows 
GC to collect + /** LazyListIterable specialization of reduceLeft which allows GC to collect * along the way. * * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `LazyList`. + * @param f The operation to perform on successive elements of the `LazyListIterable`. * @return The accumulated value from successive applications of `f`. */ override def reduceLeft[B >: A](f: (B, A) => B): B = { if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") else { var reducedRes: B = this.head - var left: LazyList[A] = this.tail + var left: LazyListIterable[A]^{this} = this.tail while (!left.isEmpty) { reducedRes = f(reducedRes, left.head) left = left.tail @@ -444,13 +453,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def partition(p: A => Boolean): (LazyList[A], LazyList[A]) = (filter(p), filterNot(p)) + override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p)) /** @inheritdoc * * $preservesLaziness */ - override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyList[A1], LazyList[A2]) = { + override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = { val (left, right) = map(f).partition(_.isLeft) (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value)) } @@ -459,17 +468,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def filter(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = false) + override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = false) /** @inheritdoc * * 
$preservesLaziness */ - override def filterNot(pred: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.filterImpl(this, pred, isFlipped = true) + override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.filterImpl(this, pred, isFlipped = true) /** A `collection.WithFilter` which allows GC of the head of lazy list during processing. * @@ -479,21 +488,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The `collection.WithFilter` returned by this method preserves laziness; elements are * only evaluated individually as needed. */ - override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyList] = - new LazyList.WithFilter(coll, p) + override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} = + new LazyListIterable.WithFilter(coll, p) /** @inheritdoc * * $preservesLaziness */ - override def prepended[B >: A](elem: B): LazyList[B] = newLL(sCons(elem, this)) + def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this)) /** @inheritdoc * * $preservesLaziness */ - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.from(prefix) + def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} = + if (knownIsEmpty) LazyListIterable.from(prefix) else if (prefix.knownSize == 0) this else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state)) @@ -501,17 +510,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def map[B](f: A => B): LazyList[B] = - if (knownIsEmpty) LazyList.empty + override def map[B](f: A => B): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty else (mapImpl(f): @inline) /** @inheritdoc * * $preservesLaziness */ - override def tapEach[U](f: A 
=> U): LazyList[A] = map { a => f(a); a } + override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a } - private def mapImpl[B](f: A => B): LazyList[B] = + private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} = newLL { if (isEmpty) State.Empty else sCons(f(head), tail.mapImpl(f)) @@ -521,9 +530,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def collect[B](pf: PartialFunction[A, B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.collectImpl(this, pf) + override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.collectImpl(this, pf) /** @inheritdoc * @@ -534,7 +543,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = if (isEmpty) None else { - val res = pf.applyOrElse(head, LazyList.anyToMarker.asInstanceOf[A => B]) + val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B]) if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf) else Some(res) } @@ -559,25 +568,25 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta */ // optimisations are not for speed, but for functionality // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override def flatMap[B](f: A => IterableOnce[B]): LazyList[B] = - if (knownIsEmpty) LazyList.empty - else LazyList.flatMapImpl(this, f) + override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.flatMapImpl(this, f) /** @inheritdoc * * $preservesLaziness */ - override def flatten[B](implicit asIterable: A => IterableOnce[B]): LazyList[B] = flatMap(asIterable) + override def flatten[B](implicit 
asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable) /** @inheritdoc * * $preservesLaziness */ - override def zip[B](that: collection.IterableOnce[B]): LazyList[(A, B)] = - if (this.knownIsEmpty || that.knownSize == 0) LazyList.empty + override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} = + if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty else newLL(zipState(that.iterator)) - private def zipState[B](it: Iterator[B]): State[(A, B)] = + private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} = if (this.isEmpty || !it.hasNext) State.Empty else sCons((head, it.next()), newLL { tail zipState it }) @@ -585,29 +594,29 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def zipWithIndex: LazyList[(A, Int)] = this zip LazyList.from(0) + override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0) /** @inheritdoc * * $preservesLaziness */ - override def zipAll[A1 >: A, B](that: collection.Iterable[B], thisElem: A1, thatElem: B): LazyList[(A1, B)] = { + override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = { if (this.knownIsEmpty) { - if (that.knownSize == 0) LazyList.empty - else LazyList.continually(thisElem) zip that + if (that.knownSize == 0) LazyListIterable.empty + else LazyListIterable.continually(thisElem) zip that } else { - if (that.knownSize == 0) zip(LazyList.continually(thatElem)) + if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem)) else newLL(zipAllState(that.iterator, thisElem, thatElem)) } } - private def zipAllState[A1 >: A, B](it: Iterator[B], thisElem: A1, thatElem: B): State[(A1, B)] = { + private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = { if (it.hasNext) { - if (this.isEmpty) sCons((thisElem, 
it.next()), newLL { LazyList.continually(thisElem) zipState it }) + if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it }) else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) }) } else { if (this.isEmpty) State.Empty - else sCons((this.head, thatElem), this.tail zip LazyList.continually(thatElem)) + else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem)) } } @@ -620,21 +629,21 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * only evaluated individually as needed. */ // just in case it can be meaningfully overridden at some point - override def lazyZip[B](that: collection.Iterable[B]): LazyZip2[A, B, LazyList.this.type] = + override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} = super.lazyZip(that) /** @inheritdoc * * $preservesLaziness */ - override def unzip[A1, A2](implicit asPair: A => (A1, A2)): (LazyList[A1], LazyList[A2]) = + override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) = (map(asPair(_)._1), map(asPair(_)._2)) /** @inheritdoc * * $preservesLaziness */ - override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (LazyList[A1], LazyList[A2], LazyList[A3]) = + override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) = (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3)) /** @inheritdoc @@ -642,27 +651,27 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $initiallyLazy * Additionally, it preserves laziness for all except the first `n` elements. 
*/ - override def drop(n: Int): LazyList[A] = + override def drop(n: Int): LazyListIterable[A]^{this} = if (n <= 0) this - else if (knownIsEmpty) LazyList.empty - else LazyList.dropImpl(this, n) + else if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all elements after the predicate returns `false`. */ - override def dropWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else LazyList.dropWhileImpl(this, p) + override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty + else LazyListIterable.dropWhileImpl(this, p) /** @inheritdoc * * $initiallyLazy */ - override def dropRight(n: Int): LazyList[A] = { + override def dropRight(n: Int): LazyListIterable[A]^{this} = { if (n <= 0) this - else if (knownIsEmpty) LazyList.empty + else if (knownIsEmpty) LazyListIterable.empty else newLL { var scout = this var remaining = n @@ -675,7 +684,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta } } - private def dropRightState(scout: LazyList[_]): State[A] = + private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} = if (scout.isEmpty) State.Empty else sCons(head, newLL(tail.dropRightState(scout.tail))) @@ -683,12 +692,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def take(n: Int): LazyList[A] = - if (knownIsEmpty) LazyList.empty + override def take(n: Int): LazyListIterable[A] = + if (knownIsEmpty) LazyListIterable.empty else (takeImpl(n): @inline) - private def takeImpl(n: Int): LazyList[A] = { - if (n <= 0) LazyList.empty + private def takeImpl(n: Int): LazyListIterable[A] = { + if (n <= 0) LazyListIterable.empty else newLL { if (isEmpty) State.Empty else sCons(head, tail.takeImpl(n - 1)) @@ -699,11 +708,11 @@ final class LazyList[+A] private(private[this] 
var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def takeWhile(p: A => Boolean): LazyList[A] = - if (knownIsEmpty) LazyList.empty + override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} = + if (knownIsEmpty) LazyListIterable.empty else (takeWhileImpl(p): @inline) - private def takeWhileImpl(p: A => Boolean): LazyList[A] = + private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} = newLL { if (isEmpty || !p(head)) State.Empty else sCons(head, tail.takeWhileImpl(p)) @@ -713,45 +722,29 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $initiallyLazy */ - override def takeRight(n: Int): LazyList[A] = - if (n <= 0 || knownIsEmpty) LazyList.empty - else LazyList.takeRightImpl(this, n) + override def takeRight(n: Int): LazyListIterable[A]^{this} = + if (n <= 0 || knownIsEmpty) LazyListIterable.empty + else LazyListIterable.takeRightImpl(this, n) /** @inheritdoc * * $initiallyLazy * Additionally, it preserves laziness for all but the first `from` elements. 
*/ - override def slice(from: Int, until: Int): LazyList[A] = take(until).drop(from) + override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from) /** @inheritdoc * * $evaluatesAllElements */ - override def reverse: LazyList[A] = reverseOnto(LazyList.empty) + def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty) - // need contravariant type B to make the compiler happy - still returns LazyList[A] + // need contravariant type B to make the compiler happy - still returns LazyListIterable[A] @tailrec - private def reverseOnto[B >: A](tl: LazyList[B]): LazyList[B] = + private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] = if (isEmpty) tl else tail.reverseOnto(newLL(sCons(head, tl))) - /** @inheritdoc - * - * $preservesLaziness - */ - override def diff[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.diff(that) - - /** @inheritdoc - * - * $preservesLaziness - */ - override def intersect[B >: A](that: collection.Seq[B]): LazyList[A] = - if (knownIsEmpty) LazyList.empty - else super.intersect(that) - @tailrec private def lengthGt(len: Int): Boolean = if (len < 0) true @@ -763,7 +756,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * a single element ahead of the iterator is evaluated. */ - override def grouped(size: Int): Iterator[LazyList[A]] = { + override def grouped(size: Int): Iterator[LazyListIterable[A]] = { require(size > 0, "size must be positive, but was " + size) slidingImpl(size = size, step = size) } @@ -773,12 +766,12 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * The iterator returned by this method mostly preserves laziness; * `size - step max 1` elements ahead of the iterator are evaluated. 
*/ - override def sliding(size: Int, step: Int): Iterator[LazyList[A]] = { + override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = { require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive") slidingImpl(size = size, step = step) } - @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyList[A]] = + @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] = if (knownIsEmpty) Iterator.empty else new SlidingIterator[A](this, size = size, step = step) @@ -786,10 +779,10 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def padTo[B >: A](len: Int, elem: B): LazyList[B] = { + def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = { if (len <= 0) this else newLL { - if (isEmpty) LazyList.fill(len)(elem).state + if (isEmpty) LazyListIterable.fill(len)(elem).state else sCons(head, tail.padTo(len - 1, elem)) } } @@ -798,13 +791,13 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * $preservesLaziness */ - override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = - if (knownIsEmpty) LazyList from other + def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = + if (knownIsEmpty) LazyListIterable from other else patchImpl(from, other, replaced) - private def patchImpl[B >: A](from: Int, other: IterableOnce[B], replaced: Int): LazyList[B] = + private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} = newLL { - if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyList.dropImpl(this, replaced).state) + if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state) else if (isEmpty) stateFromIterator(other.iterator) else sCons(head, tail.patchImpl(from - 1, other, replaced)) 
} @@ -814,17 +807,17 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * $evaluatesAllElements */ // overridden just in case a lazy implementation is developed at some point - override def transpose[B](implicit asIterable: A => collection.Iterable[B]): LazyList[LazyList[B]] = super.transpose + override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose /** @inheritdoc * * $preservesLaziness */ - override def updated[B >: A](index: Int, elem: B): LazyList[B] = + def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} = if (index < 0) throw new IndexOutOfBoundsException(s"$index") else updatedImpl(index, elem, index) - private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyList[B] = { + private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = { newLL { if (index <= 0) sCons(elem, tail) else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString) @@ -859,9 +852,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta else if (!isEmpty) { b.append(head) var cursor = this - @inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) + inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head) var scout = tail - @inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty + inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) { cursor = scout if (scoutNonEmpty) { @@ -883,7 +876,7 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta // if cursor (eq scout) has state defined, it is empty; else unknown state if (!cursor.stateDefined) b.append(sep).append("") } else { - @inline def same(a: LazyList[A], b: LazyList[A]): Boolean = (a eq b) || (a.state eq b.state) + 
@inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state) // Cycle. // If we have a prefix of length P followed by a cycle of length C, // the scout will be at position (P%C) in the cycle when the cursor @@ -926,9 +919,9 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta * * Examples: * - * - `"LazyList(4, <not computed>)"`, a non-empty lazy list ; - * - `"LazyList(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; - * - `"LazyList(1, 2, 3, <cycle>)"`, an infinite lazy list that contains + * - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ; + * - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ; + * - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains * a cycle at the fourth element. */ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString @@ -963,48 +956,49 @@ final class LazyList[+A] private(private[this] var lazyState: () => LazyList.Sta /** * $factoryInfo * @define coll lazy list - * @define Coll `LazyList` + * @define Coll `LazyListIterable` */ @SerialVersionUID(3L) -object LazyList extends SeqFactory[LazyList] { +object LazyListIterable extends IterableFactory[LazyListIterable] { // Eagerly evaluate cached empty instance private[this] val _empty = newLL(State.Empty).force private sealed trait State[+A] extends Serializable { + this: State[A]^ => def head: A - def tail: LazyList[A] + def tail: LazyListIterable[A]^ } private object State { @SerialVersionUID(3L) object Empty extends State[Nothing] { def head: Nothing = throw new NoSuchElementException("head of empty lazy list") - def tail: LazyList[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") + def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list") } @SerialVersionUID(3L) - final class 
Cons[A](val head: A, val tail: LazyList[A]) extends State[A] + final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A] } - /** Creates a new LazyList. */ - @inline private def newLL[A](state: => State[A]): LazyList[A] = new LazyList[A](() => state) + /** Creates a new LazyListIterable. */ + @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state) /** Creates a new State.Cons. */ - @inline private def sCons[A](hd: A, tl: LazyList[A]): State[A] = new State.Cons[A](hd, tl) + @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl) private val anyToMarker: Any => Any = _ => Statics.pfMarker /* All of the following `Impl` methods are carefully written so as not to - * leak the beginning of the `LazyList`. They copy the initial `LazyList` (`ll`) into + * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently - * leaking the head of the `LazyList`. Additionally, the methods are written so that, should - * an exception be thrown by the evaluation of the `LazyList` or any supplied function, they + * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should + * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they * can continue their execution where they left off. 
*/ - private def filterImpl[A](ll: LazyList[A], p: A => Boolean, isFlipped: Boolean): LazyList[A] = { + private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var elem: A = null.asInstanceOf[A] var found = false @@ -1019,9 +1013,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def collectImpl[A, B](ll: LazyList[A], pf: PartialFunction[A, B]): LazyList[B] = { + private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { val marker = Statics.pfMarker val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased @@ -1038,11 +1032,11 @@ object LazyList extends SeqFactory[LazyList] { } } - private def flatMapImpl[A, B](ll: LazyList[A], f: A => IterableOnce[B]): LazyList[B] = { + private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { - var it: Iterator[B] = null + var it: Iterator[B @uncheckedCaptures]^{ll, f} = null var itHasNext = false var rest = restRef // var rest = restRef.elem while 
(!itHasNext && !rest.isEmpty) { @@ -1062,9 +1056,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric var iRef = n // val iRef = new IntRef(n) newLL { var rest = restRef // var rest = restRef.elem @@ -1079,9 +1073,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private def dropWhileImpl[A](ll: LazyList[A], p: A => Boolean): LazyList[A] = { + private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric newLL { var rest = restRef // var rest = restRef.elem while (!rest.isEmpty && p(rest.head)) { @@ -1092,10 +1086,10 @@ object LazyList extends SeqFactory[LazyList] { } } - private def takeRightImpl[A](ll: LazyList[A], n: Int): LazyList[A] = { + private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = { // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD - var restRef = ll // val restRef = new ObjectRef(ll) - var scoutRef = ll // val scoutRef = new ObjectRef(ll) + var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric + var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation var remainingRef = n // val remainingRef 
= new IntRef(n) newLL { var scout = scoutRef // var scout = scoutRef.elem @@ -1120,117 +1114,115 @@ object LazyList extends SeqFactory[LazyList] { } } - /** An alternative way of building and matching lazy lists using LazyList.cons(hd, tl). + /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl). */ object cons { /** A lazy list consisting of a given first element and remaining elements * @param hd The first element of the result lazy list * @param tl The remaining elements of the result lazy list */ - def apply[A](hd: => A, tl: => LazyList[A]): LazyList[A] = newLL(sCons(hd, newLL(tl.state))) + def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state))) /** Maps a lazy list to its head and tail */ - def unapply[A](xs: LazyList[A]): Option[(A, LazyList[A])] = #::.unapply(xs) + def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs) } - implicit def toDeferrer[A](l: => LazyList[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[LazyList] (private val l: () => LazyList[A]) extends AnyVal { - /** Construct a LazyList consisting of a given first element followed by elements - * from another LazyList. + extension [A](l: => LazyListIterable[A]) + /** Construct a LazyListIterable consisting of a given first element followed by elements + * from another LazyListIterable. */ - def #:: [B >: A](elem: => B): LazyList[B] = newLL(sCons(elem, newLL(l().state))) - /** Construct a LazyList consisting of the concatenation of the given LazyList and - * another LazyList. + def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state))) + + /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and + * another LazyListIterable. 
*/ - def #:::[B >: A](prefix: LazyList[B]): LazyList[B] = prefix lazyAppendedAll l() - } + def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l object #:: { - def unapply[A](s: LazyList[A]): Option[(A, LazyList[A])] = + def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] = if (!s.isEmpty) Some((s.head, s.tail)) else None } - def from[A](coll: collection.IterableOnce[A]): LazyList[A] = coll match { - case lazyList: LazyList[A] => lazyList + def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match { + case lazyList: LazyListIterable[A] => lazyList case _ if coll.knownSize == 0 => empty[A] case _ => newLL(stateFromIterator(coll.iterator)) } - def empty[A]: LazyList[A] = _empty + def empty[A]: LazyListIterable[A] = _empty /** Creates a State from an Iterator, with another State appended after the Iterator * is empty. */ - private def stateFromIteratorConcatSuffix[A](it: Iterator[A])(suffix: => State[A]): State[A] = + private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} = if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix))) else suffix /** Creates a State from an IterableOnce. 
*/ - private def stateFromIterator[A](it: Iterator[A]): State[A] = + private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} = if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it))) else State.Empty - override def concat[A](xss: collection.Iterable[A]*): LazyList[A] = + override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] = if (xss.knownSize == 0) empty else newLL(concatIterator(xss.iterator)) - private def concatIterator[A](it: Iterator[collection.Iterable[A]]): State[A] = + private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} = if (!it.hasNext) State.Empty else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it)) - /** An infinite LazyList that repeatedly applies a given function to a start value. + /** An infinite LazyListIterable that repeatedly applies a given function to a start value. * - * @param start the start value of the LazyList + * @param start the start value of the LazyListIterable * @param f the function that's repeatedly applied - * @return the LazyList returning the infinite sequence of values `start, f(start), f(f(start)), ...` + * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...` */ - def iterate[A](start: => A)(f: A => A): LazyList[A] = + def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} = newLL { val head = start sCons(head, iterate(f(head))(f)) } /** - * Create an infinite LazyList starting at `start` and incrementing by + * Create an infinite LazyListIterable starting at `start` and incrementing by * step `step`. * - * @param start the start value of the LazyList - * @param step the increment value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @param step the increment value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. 
*/ - def from(start: Int, step: Int): LazyList[Int] = + def from(start: Int, step: Int): LazyListIterable[Int] = newLL(sCons(start, from(start + step, step))) /** - * Create an infinite LazyList starting at `start` and incrementing by `1`. + * Create an infinite LazyListIterable starting at `start` and incrementing by `1`. * - * @param start the start value of the LazyList - * @return the LazyList starting at value `start`. + * @param start the start value of the LazyListIterable + * @return the LazyListIterable starting at value `start`. */ - def from(start: Int): LazyList[Int] = from(start, 1) + def from(start: Int): LazyListIterable[Int] = from(start, 1) /** - * Create an infinite LazyList containing the given element expression (which + * Create an infinite LazyListIterable containing the given element expression (which * is computed for each occurrence). * - * @param elem the element composing the resulting LazyList - * @return the LazyList containing an infinite number of elem + * @param elem the element composing the resulting LazyListIterable + * @return the LazyListIterable containing an infinite number of elem */ - def continually[A](elem: => A): LazyList[A] = newLL(sCons(elem, continually(elem))) + def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem))) - override def fill[A](n: Int)(elem: => A): LazyList[A] = + override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} = if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty - override def tabulate[A](n: Int)(f: Int => A): LazyList[A] = { - def at(index: Int): LazyList[A] = + override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = { + def at(index: Int): LazyListIterable[A]^{f} = if (index < n) newLL(sCons(f(index), at(index + 1))) else empty at(0) } // significantly simpler than the iterator returned by Iterator.unfold - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyList[A] = + override def unfold[A, S](init: 
S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} = newLL { f(init) match { case Some((elem, state)) => sCons(elem, unfold(state)(f)) @@ -1244,9 +1236,9 @@ object LazyList extends SeqFactory[LazyList] { * @tparam A the type of the ${coll}’s elements * @return A builder for $Coll objects. */ - def newBuilder[A]: Builder[A, LazyList[A]] = new LazyBuilder[A] + def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A] - private class LazyIterator[+A](private[this] var lazyList: LazyList[A]) extends AbstractIterator[A] { + private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] { override def hasNext: Boolean = !lazyList.isEmpty override def next(): A = @@ -1258,8 +1250,9 @@ object LazyList extends SeqFactory[LazyList] { } } - private class SlidingIterator[A](private[this] var lazyList: LazyList[A], size: Int, step: Int) - extends AbstractIterator[LazyList[A]] { + private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int) + extends AbstractIterator[LazyListIterable[A]] { + this: SlidingIterator[A]^ => private val minLen = size - step max 0 private var first = true @@ -1267,7 +1260,7 @@ object LazyList extends SeqFactory[LazyList] { if (first) !lazyList.isEmpty else lazyList.lengthGt(minLen) - def next(): LazyList[A] = { + def next(): LazyListIterable[A] = { if (!hasNext) Iterator.empty.next() else { first = false @@ -1278,20 +1271,21 @@ object LazyList extends SeqFactory[LazyList] { } } - private final class WithFilter[A] private[LazyList](lazyList: LazyList[A], p: A => Boolean) - extends collection.WithFilter[A, LazyList] { + private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean) + extends collection.WithFilter[A, LazyListIterable] { + this: WithFilter[A]^ => private[this] val filtered = lazyList.filter(p) - def map[B](f: A => B): LazyList[B] = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]): 
LazyList[B] = filtered.flatMap(f) + def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f) + def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f) def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, LazyList] = new WithFilter(filtered, q) + def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q) } - private final class LazyBuilder[A] extends ReusableBuilder[A, LazyList[A]] { + private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] { import LazyBuilder._ - private[this] var next: DeferredState[A] = _ - private[this] var list: LazyList[A] = _ + private[this] var next: DeferredState[A @uncheckedCaptures] = _ + private[this] var list: LazyListIterable[A @uncheckedCaptures] = _ clear() @@ -1301,7 +1295,7 @@ object LazyList extends SeqFactory[LazyList] { next = deferred } - override def result(): LazyList[A] = { + override def result(): LazyListIterable[A] = { next init State.Empty list } @@ -1314,10 +1308,10 @@ object LazyList extends SeqFactory[LazyList] { } // lazy implementation which doesn't evaluate the collection being added - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { if (xs.knownSize != 0) { val deferred = new DeferredState[A] - next init stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()) + next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval())) next = deferred } this @@ -1326,16 +1320,17 @@ object LazyList extends SeqFactory[LazyList] { private object LazyBuilder { final class DeferredState[A] { - private[this] var _state: () => State[A] = _ + this: DeferredState[A]^ => + private[this] var _state: (() => State[A]^) @uncheckedCaptures = _ - def eval(): State[A] = { + def eval(): State[A]^ = { val state = _state if (state == null) throw new 
IllegalStateException("uninitialized") state() } // racy - def init(state: => State[A]): Unit = { + def init(state: => State[A]^): Unit = { if (_state != null) throw new IllegalStateException("already initialized") _state = () => state } @@ -1348,7 +1343,7 @@ object LazyList extends SeqFactory[LazyList] { * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells. */ @SerialVersionUID(3L) - final class SerializationProxy[A](@transient protected var coll: LazyList[A]) extends Serializable { + final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable { private[this] def writeObject(out: ObjectOutputStream): Unit = { out.defaultWriteObject() @@ -1363,15 +1358,15 @@ object LazyList extends SeqFactory[LazyList] { private[this] def readObject(in: ObjectInputStream): Unit = { in.defaultReadObject() - val init = new mutable.ListBuffer[A] + val init = new mutable.ListBuffer[A @uncheckedCaptures] var initRead = false while (!initRead) in.readObject match { case SerializeEnd => initRead = true case a => init += a.asInstanceOf[A] } - val tail = in.readObject().asInstanceOf[LazyList[A]] + val tail = in.readObject().asInstanceOf[LazyListIterable[A]] // scala/scala#10118: caution that no code path can evaluate `tail.state` - // before the resulting LazyList is returned + // before the resulting LazyListIterable is returned val it = init.toList.iterator coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state)) } diff --git a/tests/pos-special/stdlib/collection/immutable/List.scala b/tests/pos-special/stdlib/collection/immutable/List.scala index 5358922752fb..6a305f4ebdec 100644 --- a/tests/pos-special/stdlib/collection/immutable/List.scala +++ b/tests/pos-special/stdlib/collection/immutable/List.scala @@ -14,11 +14,12 @@ package scala package collection package immutable -import scala.annotation.unchecked.uncheckedVariance +import scala.annotation.unchecked.{uncheckedVariance, 
uncheckedCaptures} import scala.annotation.tailrec import mutable.{Builder, ListBuffer} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking /** A class for immutable linked lists representing ordered collections * of elements of type `A`. @@ -143,7 +144,7 @@ sealed abstract class List[+A] override def prepended[B >: A](elem: B): List[B] = elem :: this - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): List[B] = prefix match { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): List[B] = prefix match { case xs: List[B] => xs ::: this case _ if prefix.knownSize == 0 => this case b: ListBuffer[B] if this.isEmpty => b.toList @@ -165,7 +166,7 @@ sealed abstract class List[+A] } // When calling appendAll with another list `suffix`, avoid copying `suffix` - override def appendedAll[B >: A](suffix: collection.IterableOnce[B]): List[B] = suffix match { + override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): List[B] = suffix match { case xs: List[B] => this ::: xs case _ => super.appendedAll(suffix) } @@ -214,7 +215,7 @@ sealed abstract class List[+A] // dropRight is inherited from LinearSeq override def splitAt(n: Int): (List[A], List[A]) = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var i = 0 var these = this while (!these.isEmpty && i < n) { @@ -257,7 +258,7 @@ sealed abstract class List[+A] } } - final override def collect[B](pf: PartialFunction[A, B]): List[B] = { + final override def collect[B](pf: PartialFunction[A, B]^): List[B] = { if (this eq Nil) Nil else { var rest = this var h: ::[B] = null @@ -285,7 +286,7 @@ sealed abstract class List[+A] } } - final override def flatMap[B](f: A => IterableOnce[B]): List[B] = { + final override def flatMap[B](f: A => IterableOnce[B]^): List[B] = { var rest = this var h: ::[B] = null var t: ::[B] = null @@ -306,7 +307,7 @@ sealed abstract class 
List[+A] } @inline final override def takeWhile(p: A => Boolean): List[A] = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -316,7 +317,7 @@ sealed abstract class List[+A] } @inline final override def span(p: A => Boolean): (List[A], List[A]) = { - val b = new ListBuffer[A] + val b = new ListBuffer[A @uncheckedCaptures] var these = this while (!these.isEmpty && p(these.head)) { b += these.head @@ -651,7 +652,7 @@ sealed abstract class List[+A] // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or // before a newly-allocated, thread-local :: instance is aliased (e.g. in ListBuffer.toList) -final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally +final case class :: [+A](override val head: A, private[scala] var next: List[A @uncheckedVariance @uncheckedCaptures]) // sound because `next` is used only locally extends List[A] { releaseFence() override def headOption: Some[A] = Some(head) @@ -666,7 +667,7 @@ case object Nil extends List[Nothing] { override def init: Nothing = throw new UnsupportedOperationException("init of empty list") override def knownSize: Int = 0 override def iterator: Iterator[Nothing] = Iterator.empty - override def unzip[A1, A2](implicit asPair: Nothing => (A1, A2)): (List[A1], List[A2]) = EmptyUnzip + override def unzip[A1, A2](implicit asPair: Nothing -> (A1, A2)): (List[A1], List[A2]) = EmptyUnzip @transient private[this] val EmptyUnzip = (Nil, Nil) @@ -681,9 +682,9 @@ case object Nil extends List[Nothing] { object List extends StrictOptimizedSeqFactory[List] { private val TupleOfNil = (Nil, Nil) - def from[B](coll: collection.IterableOnce[B]): List[B] = Nil.prependedAll(coll) + def from[B](coll: collection.IterableOnce[B]^): List[B] = Nil.prependedAll(coll) - def newBuilder[A]: Builder[A, List[A]] = new 
ListBuffer() + def newBuilder[A]: Builder[A, List[A]] = new ListBuffer[A @uncheckedCaptures]() def empty[A]: List[A] = Nil diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala index 4a2b8dbd807c..c5000d785144 100644 --- a/tests/pos-special/stdlib/collection/immutable/ListMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala @@ -19,6 +19,8 @@ import scala.collection.mutable.ReusableBuilder import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * This class implements immutable maps using a list-based data structure. List map iterators and @@ -131,8 +133,8 @@ object ListMap extends MapFactory[ListMap] { */ private[immutable] final class Node[K, V]( override private[immutable] val key: K, - private[immutable] var _value: V, - private[immutable] var _init: ListMap[K, V] + private[immutable] var _value: V @uncheckedCaptures, + private[immutable] var _init: ListMap[K, V] @uncheckedCaptures ) extends ListMap[K, V] { releaseFence() @@ -239,7 +241,7 @@ object ListMap extends MapFactory[ListMap] { private object EmptyListMap extends ListMap[Any, Nothing] - def from[K, V](it: collection.IterableOnce[(K, V)]): ListMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] = it match { case lm: ListMap[K, V] => lm case lhm: collection.mutable.LinkedHashMap[K, V] => @@ -285,7 +287,7 @@ object ListMap extends MapFactory[ListMap] { */ private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] { private[this] var isAliased: Boolean = false - private[this] var underlying: ListMap[K, V] = ListMap.empty + private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty override def clear(): Unit = { underlying = 
ListMap.empty @@ -322,7 +324,7 @@ private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuil } this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { if (isAliased) { super.addAll(xs) } else if (underlying.nonEmpty) { diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala index e2ab0de858da..719abd78e1e6 100644 --- a/tests/pos-special/stdlib/collection/immutable/ListSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala @@ -17,6 +17,8 @@ package immutable import mutable.{Builder, ImmutableBuilder} import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** * This class implements immutable sets using a list-based data structure. List set iterators and @@ -117,7 +119,7 @@ sealed class ListSet[A] @SerialVersionUID(3L) object ListSet extends IterableFactory[ListSet] { - def from[E](it: scala.collection.IterableOnce[E]): ListSet[E] = + def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] = it match { case ls: ListSet[E] => ls case _ if it.knownSize == 0 => empty[E] diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala index c418dc7616ac..4abf433273f2 100644 --- a/tests/pos-special/stdlib/collection/immutable/LongMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala @@ -20,6 +20,8 @@ import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer} import scala.annotation.tailrec import scala.annotation.unchecked.uncheckedVariance import scala.language.implicitConversions +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Utility class for long maps. 
*/ @@ -52,7 +54,7 @@ object LongMap { def apply[T](elems: (Long, T)*): LongMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) - def from[V](coll: IterableOnce[(Long, V)]): LongMap[V] = + def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] = newBuilder[V].addAll(coll).result() def newBuilder[V]: Builder[(Long, V), LongMap[V]] = @@ -86,13 +88,13 @@ object LongMap { @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] } @@ -176,7 +178,7 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] with StrictOptimizedMapOps[Long, T, Map, LongMap[T]] with Serializable { - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T)] @uncheckedVariance): LongMap[T] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = { //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? 
val b = newSpecificBuilder b.sizeHint(coll) @@ -191,7 +193,7 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] override def empty: LongMap[T] = LongMap.Nil override def toList = { - val buffer = new ListBuffer[(Long, T)] + val buffer = new ListBuffer[(Long, T) @uncheckedCaptures] foreach(buffer += _) buffer.toList } @@ -478,10 +480,10 @@ sealed abstract class LongMap[+T] extends AbstractMap[Long, T] def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = + override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such - override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(that) + override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that) def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] = strictOptimizedCollect(LongMap.newBuilder[V2], pf) diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala index 9d334893b8cc..6daad829bf55 100644 --- a/tests/pos-special/stdlib/collection/immutable/Map.scala +++ b/tests/pos-special/stdlib/collection/immutable/Map.scala @@ -18,6 +18,8 @@ import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializable import scala.collection.immutable.Map.Map4 import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** Base type of immutable Maps */ trait Map[K, +V] @@ -39,7 +41,7 @@ trait Map[K, +V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - def 
withDefault[V1 >: V](d: K => V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) + def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -86,10 +88,10 @@ trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C] * @return a new $coll that contains all elements of the current $coll * except one less occurrence of each of the elements of `elems`. */ - def removedAll(keys: IterableOnce[K]): C = keys.iterator.foldLeft[C](coll)(_ - _) + def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _) /** Alias for `removedAll` */ - @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys) + @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys) /** Creates a new map obtained by updating this map with a given key/value pair. 
* @param key the key @@ -153,7 +155,7 @@ trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapO with collection.StrictOptimizedMapOps[K, V, CC, C] with StrictOptimizedIterableOps[(K, V), Iterable, C] { - override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]): CC[K, V1] = { + override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = { var result: CC[K, V1] = coll val it = that.iterator while (it.hasNext) result = result + it.next() @@ -171,7 +173,7 @@ trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapO object Map extends MapFactory[Map] { @SerialVersionUID(3L) - class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K => V) + class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V) extends AbstractMap[K, V] with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { @@ -187,7 +189,7 @@ object Map extends MapFactory[Map] { override def mapFactory: MapFactory[Map] = underlying.mapFactory - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = new WithDefault(underlying.concat(xs), defaultValue) def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) @@ -197,7 +199,7 @@ object Map extends MapFactory[Map] { override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = new WithDefault[K, V](mapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = @@ -206,7 +208,7 @@ object Map extends MapFactory[Map] { def empty[K, V]: Map[K, V] 
= EmptyMap.asInstanceOf[Map[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): Map[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] = it match { case it: Iterable[_] if it.isEmpty => empty[K, V] case m: Map[K, V] => m @@ -229,7 +231,7 @@ object Map extends MapFactory[Map] { override def valuesIterator: Iterator[Nothing] = Iterator.empty def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) def removed(key: Any): Map[Any, Nothing] = this - override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]): Map[Any, V2] = suffix match { + override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match { case m: immutable.Map[Any, V2] => m case _ => super.concat(suffix) } @@ -313,7 +315,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map2Iterator[A] extends AbstractIterator[A] { + private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 2 override def next(): A = { @@ -416,7 +418,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map3Iterator[A] extends AbstractIterator[A] { + private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 3 override def next(): A = { @@ -536,7 +538,7 @@ object Map extends MapFactory[Map] { override protected def nextResult(k: K, v: V): V = v } - private abstract class Map4Iterator[A] extends AbstractIterator[A] { + private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure { private[this] var i = 0 override def hasNext: Boolean = i < 4 override def next(): A = { @@ -639,9 +641,9 @@ object Map extends MapFactory[Map] { abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] private[immutable] final class 
MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] { - private[this] var elems: Map[K, V] = Map.empty + private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty private[this] var switchedToHashMapBuilder: Boolean = false - private[this] var hashMapBuilder: HashMapBuilder[K, V] = _ + private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _ private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 = if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value) @@ -682,7 +684,7 @@ private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, def addOne(elem: (K, V)) = addOne(elem._1, elem._2) - override def addAll(xs: IterableOnce[(K, V)]): this.type = + override def addAll(xs: IterableOnce[(K, V)]^): this.type = if (switchedToHashMapBuilder) { hashMapBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala index d1ee494711a7..f26d9728e5ad 100644 --- a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala +++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala @@ -14,6 +14,8 @@ package scala.collection.immutable import scala.collection.Stepper.EfficientSplit import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** `NumericRange` is a more generic version of the * `Range` class which works with arbitrary types. 
@@ -492,7 +494,7 @@ object NumericRange { import num.mkNumericOps private[this] var _hasNext = !self.isEmpty - private[this] var _next: T = self.start + private[this] var _next: T @uncheckedCaptures = self.start private[this] val lastElement: T = if (_hasNext) self.last else self.start override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0 def hasNext: Boolean = _hasNext diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala index 3d0f8206b6a9..929c79ce588a 100644 --- a/tests/pos-special/stdlib/collection/immutable/Queue.scala +++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala @@ -15,6 +15,7 @@ package immutable import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{Builder, ListBuffer} +import language.experimental.captureChecking /** `Queue` objects implement data structures that allow to * insert and retrieve elements in a first-in-first-out (FIFO) manner. 
@@ -119,7 +120,7 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L override def appended[B >: A](elem: B): Queue[B] = enqueue(elem) - override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { + override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = { val newIn = that match { case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) case that: List[B] => that reverse_::: this.in @@ -200,9 +201,9 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L */ @SerialVersionUID(3L) object Queue extends StrictOptimizedSeqFactory[Queue] { - def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) + def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x)) - def from[A](source: IterableOnce[A]): Queue[A] = source match { + def from[A](source: IterableOnce[A]^): Queue[A] = source match { case q: Queue[A] => q case _ => val list = List.from(source) diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala index 66a149840488..459591d1a9cb 100644 --- a/tests/pos-special/stdlib/collection/immutable/Range.scala +++ b/tests/pos-special/stdlib/collection/immutable/Range.scala @@ -17,6 +17,7 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl.RangeStepper import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape} import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** The `Range` class represents integer values in range * ''[start;end)'' with non-zero step value `step`. 
@@ -213,7 +214,7 @@ sealed abstract class Range( private[this] def posOf(i: Int): Int = if (contains(i)) (i - start) / step else -1 - override def sameElements[B >: Int](that: IterableOnce[B]): Boolean = that match { + override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match { case other: Range => (this.length : @annotation.switch) match { case 0 => other.isEmpty @@ -613,7 +614,7 @@ object Range { // As there is no appealing default step size for not-really-integral ranges, // we offer a partially constructed object. - class Partial[T, U](private val f: T => U) extends AnyVal { + class Partial[T, U](private val f: T -> U) extends AnyVal { def by(x: T): U = f(x) override def toString = "Range requires step" } diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala index 2e7aa7b472ad..5fbc927d7a21 100644 --- a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala +++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala @@ -17,6 +17,8 @@ package immutable import scala.annotation.meta.{getter, setter} import scala.annotation.tailrec import scala.runtime.Statics.releaseFence +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. * @@ -834,10 +836,11 @@ private[collection] object RedBlackTree { * we potentially do so in `startFrom`. 
*/ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - new Array[Tree[A, B]](maximumHeight) + new Array[Tree[A, B] @uncheckedCaptures](maximumHeight) } private[this] var index = 0 - protected var lookahead: Tree[A, B] = if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) + protected var lookahead: Tree[A, B] @uncheckedCaptures = + if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root) /** * Find the leftmost subtree whose key is equal to the given key, or if no such thing, diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala index 925fd648c70c..d575c3aaf14a 100644 --- a/tests/pos-special/stdlib/collection/immutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala @@ -14,6 +14,8 @@ package scala package collection package immutable +import language.experimental.captureChecking + trait Seq[+A] extends Iterable[A] with collection.Seq[A] with SeqOps[A, Seq, Seq[A]] @@ -37,7 +39,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C] */ @SerialVersionUID(3L) object Seq extends SeqFactory.Delegate[Seq](List) { - override def from[E](it: IterableOnce[E]): Seq[E] = it match { + override def from[E](it: IterableOnce[E]^): Seq[E] = it match { case s: Seq[E] => s case _ => super.from(it) } @@ -57,7 +59,7 @@ trait IndexedSeq[+A] extends Seq[A] } - override def sameElements[B >: A](o: IterableOnce[B]): Boolean = o match { + override def sameElements[B >: A](o: IterableOnce[B]^): Boolean = o match { case that: IndexedSeq[_] => (this eq that) || { val length = this.length @@ -110,7 +112,7 @@ object IndexedSeqDefaults { @SerialVersionUID(3L) object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](Vector) { - override def from[E](it: IterableOnce[E]): IndexedSeq[E] = it match { + override def from[E](it: IterableOnce[E]^): IndexedSeq[E] = it match { case is: IndexedSeq[E] => is 
case _ => super.from(it) } @@ -141,14 +143,14 @@ trait LinearSeq[+A] @SerialVersionUID(3L) object LinearSeq extends SeqFactory.Delegate[LinearSeq](List) { - override def from[E](it: IterableOnce[E]): LinearSeq[E] = it match { + override def from[E](it: IterableOnce[E]^): LinearSeq[E] = it match { case ls: LinearSeq[E] => ls case _ => super.from(it) } } trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] - extends Any with SeqOps[A, CC, C] + extends AnyRef with SeqOps[A, CC, C] with collection.LinearSeqOps[A, CC, C] /** Explicit instantiation of the `Seq` trait to reduce class file size in subclasses. */ diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala index aca9e139165e..6c955fd52fc2 100644 --- a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** A base trait for ordered, immutable maps. 
* @@ -44,7 +46,7 @@ trait SeqMap[K, +V] object SeqMap extends MapFactory[SeqMap] { def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): SeqMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] = it match { case sm: SeqMap[K, V] => sm case _ => (newBuilder[K, V] ++= it).result() @@ -228,9 +230,9 @@ object SeqMap extends MapFactory[SeqMap] { } private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { - private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty private[this] var switchedToVectorMapBuilder: Boolean = false - private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _ override def clear(): Unit = { elems = SeqMap.empty @@ -265,7 +267,7 @@ object SeqMap extends MapFactory[SeqMap] { this } - override def addAll(xs: IterableOnce[(K, V)]): this.type = + override def addAll(xs: IterableOnce[(K, V)]^): this.type = if (switchedToVectorMapBuilder) { vectorMapBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala index f07eb66991c8..ac92f81b2013 100644 --- a/tests/pos-special/stdlib/collection/immutable/Set.scala +++ b/tests/pos-special/stdlib/collection/immutable/Set.scala @@ -16,6 +16,8 @@ package immutable import scala.collection.immutable.Set.Set4 import scala.collection.mutable.{Builder, ReusableBuilder} +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** Base trait for immutable set collections */ trait Set[A] extends Iterable[A] @@ -94,7 +96,7 @@ object Set extends IterableFactory[Set] { def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]] - def from[E](it: collection.IterableOnce[E]): Set[E] = + def from[E](it: 
collection.IterableOnce[E]^): Set[E] = it match { // We want `SortedSet` (and subclasses, such as `BitSet`) to // rebuild themselves to avoid element type widening issues @@ -128,7 +130,7 @@ object Set extends IterableFactory[Set] { private[collection] def emptyInstance: Set[Any] = EmptySet @SerialVersionUID(3L) - private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A] with Serializable { + private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure { private[this] var current = 0 private[this] var remainder = n override def knownSize: Int = remainder @@ -351,9 +353,9 @@ abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A * $multipleResults */ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { - private[this] var elems: Set[A] = Set.empty + private[this] var elems: Set[A @uncheckedCaptures] = Set.empty private[this] var switchedToHashSetBuilder: Boolean = false - private[this] var hashSetBuilder: HashSetBuilder[A] = _ + private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _ override def clear(): Unit = { elems = Set.empty @@ -388,7 +390,7 @@ private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] { this } - override def addAll(xs: IterableOnce[A]): this.type = + override def addAll(xs: IterableOnce[A]^): this.type = if (switchedToHashSetBuilder) { hashSetBuilder.addAll(xs) this diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala index 666d8c55bfb0..9587502fd908 100644 --- a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala @@ -16,6 +16,7 @@ package immutable import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder +import language.experimental.captureChecking /** An immutable map whose key-value pairs are sorted according to 
an [[scala.math.Ordering]] on the keys. * @@ -69,7 +70,7 @@ trait SortedMap[K, +V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - override def withDefault[V1 >: V](d: K => V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) + override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -123,7 +124,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapO with collection.StrictOptimizedSortedMapOps[K, V, CC, C] with StrictOptimizedMapOps[K, V, Map, C] { - override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]): CC[K, V2] = { + override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = { var result: CC[K, V2] = coll val it = xs.iterator while (it.hasNext) result = result + it.next() @@ -134,12 +135,12 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapO @SerialVersionUID(3L) object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { - override def from[K: Ordering, V](it: IterableOnce[(K, V)]): SortedMap[K, V] = it match { + override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match { case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm case _ => super.from(it) } - final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V) + final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V) extends Map.WithDefault[K, V](underlying, defaultValue) with SortedMap[K, V] with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable { @@ -161,14 +162,14 @@ object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { override def updated[V1 >: V](key: K, value: V1): 
WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated(key, value), defaultValue) - override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]): WithDefault[K, V2] = + override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] = new WithDefault( underlying.concat(xs) , defaultValue) override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue) override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)] @uncheckedVariance): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] = new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance = diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala index 303e5ea9658c..874abcaecda1 100644 --- a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package immutable +import language.experimental.captureChecking /** Base trait for sorted sets */ trait SortedSet[A] @@ -50,7 +51,7 @@ trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[ */ @SerialVersionUID(3L) object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) { - override def from[E: Ordering](it: IterableOnce[E]): SortedSet[E] = it match { + override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match { case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss case _ => super.from(it) } diff --git a/tests/pos-special/stdlib/collection/immutable/Stream.scala 
b/tests/pos-special/stdlib/collection/immutable/Stream.scala deleted file mode 100644 index ae03641e97dd..000000000000 --- a/tests/pos-special/stdlib/collection/immutable/Stream.scala +++ /dev/null @@ -1,568 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala -package collection -package immutable - -import java.io.{ObjectInputStream, ObjectOutputStream} -import java.lang.{StringBuilder => JStringBuilder} - -import scala.annotation.tailrec -import scala.annotation.unchecked.uncheckedVariance -import scala.collection.generic.SerializeEnd -import scala.collection.mutable.{ArrayBuffer, StringBuilder} -import scala.language.implicitConversions -import Stream.cons - -@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") -@SerialVersionUID(3L) -sealed abstract class Stream[+A] extends AbstractSeq[A] - with LinearSeq[A] - with LinearSeqOps[A, Stream, Stream[A]] - with IterableFactoryDefaults[A, Stream] - with Serializable { - def tail: Stream[A] - - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: this.type - - override def iterableFactory: SeqFactory[Stream] = Stream - - override protected[this] def className: String = "Stream" - - /** Apply the given function `f` to each element of this linear sequence - * (while respecting the order of the elements). 
- * - * @param f The treatment to apply to each element. - * @note Overridden here as final to trigger tail-call optimization, which - * replaces 'this' with 'tail' at each iteration. This is absolutely - * necessary for allowing the GC to collect the underlying Stream as elements - * are consumed. - * @note This function will force the realization of the entire Stream - * unless the `f` throws an exception. - */ - @tailrec - override final def foreach[U](f: A => U): Unit = { - if (!this.isEmpty) { - f(head) - tail.foreach(f) - } - } - - @tailrec - override final def find(p: A => Boolean): Option[A] = { - if(isEmpty) None - else if(p(head)) Some(head) - else tail.find(p) - } - - override def take(n: Int): Stream[A] = { - if (n <= 0 || isEmpty) Stream.empty - else if (n == 1) new Stream.Cons(head, Stream.empty) - else new Stream.Cons(head, tail.take(n - 1)) - } - - /** Stream specialization of foldLeft which allows GC to collect along the - * way. - * - * @tparam B The type of value being accumulated. - * @param z The initial value seeded into the function `op`. - * @param op The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `op`. - */ - @tailrec - override final def foldLeft[B](z: B)(op: (B, A) => B): B = { - if (this.isEmpty) z - else tail.foldLeft(op(z, head))(op) - } - - /** The stream resulting from the concatenation of this stream with the argument stream. - * @param rest The collection that gets appended to this stream - * @return The stream containing elements of this stream and the iterable object. - */ - @deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0") - @inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest) - - protected[this] def writeReplace(): AnyRef = - if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this - - /** Prints elements of this stream one by one, separated by commas. 
*/ - @deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0") - @inline def print(): Unit = Console.print(this.force.mkString(", ")) - - /** Prints elements of this stream one by one, separated by `sep`. - * @param sep The separator string printed between consecutive elements. - */ - @deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0") - @inline def print(sep: String): Unit = Console.print(this.force.mkString(sep)) - - /** The stream resulting from the concatenation of this stream with the argument stream. - * - * @param suffix The collection that gets appended to this stream - * @return The stream containing elements of this stream and the iterable object. - */ - def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] = - if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix)) - - override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] = - if (isEmpty) z +: iterableFactory.empty - else cons(z, tail.scanLeft(op(z, head))(op)) - - /** Stream specialization of reduceLeft which allows GC to collect - * along the way. - * - * @tparam B The type of value being accumulated. - * @param f The operation to perform on successive elements of the `Stream`. - * @return The accumulated value from successive applications of `f`. 
- */ - override final def reduceLeft[B >: A](f: (B, A) => B): B = { - if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft") - else { - var reducedRes: B = this.head - var left: Stream[A] = this.tail - while (!left.isEmpty) { - reducedRes = f(reducedRes, left.head) - left = left.tail - } - reducedRes - } - } - - override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_))) - - override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false) - - override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true) - - private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = { - // optimization: drop leading prefix of elems for which f returns false - // var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise - var rest: Stream[A] = coll - while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail - // private utility func to avoid `this` on stack (would be needed for the lazy arg) - if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped) - else iterableFactory.empty - } - - /** A `collection.WithFilter` which allows GC of the head of stream during processing */ - override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] = - Stream.withFilter(coll, p) - - override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll) - - override final def map[B](f: A => B): Stream[B] = - if (isEmpty) iterableFactory.empty - else cons(f(head), tail.map(f)) - - @tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] = - if(isEmpty) Stream.empty - else { - var newHead: B = null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) - if(runWith(head)) Stream.collectedTail(newHead, this, pf) - else tail.collect(pf) - } - - @tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = - if(isEmpty) None - else { - var newHead: B 
= null.asInstanceOf[B] - val runWith = pf.runWith((b: B) => newHead = b) - if(runWith(head)) Some(newHead) - else tail.collectFirst(pf) - } - - // optimisations are not for speed, but for functionality - // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala) - override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] = - if (isEmpty) iterableFactory.empty - else { - // establish !prefix.isEmpty || nonEmptyPrefix.isEmpty - var nonEmptyPrefix: Stream[A] = coll - var prefix = iterableFactory.from(f(nonEmptyPrefix.head)) - while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) { - nonEmptyPrefix = nonEmptyPrefix.tail - if(!nonEmptyPrefix.isEmpty) - prefix = iterableFactory.from(f(nonEmptyPrefix.head)) - } - - if (nonEmptyPrefix.isEmpty) iterableFactory.empty - else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f)) - } - - override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] = - if (this.isEmpty || that.isEmpty) iterableFactory.empty - else { - val thatIterable = that match { - case that: collection.Iterable[B] => that - case _ => LazyList.from(that) - } - cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail)) - } - - override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0)) - - protected def tailDefined: Boolean - - /** Appends all elements of this $coll to a string builder using start, end, and separator strings. - * The written text begins with the string `start` and ends with the string `end`. - * Inside, the string representations (w.r.t. the method `toString`) - * of all elements of this $coll are separated by the string `sep`. - * - * Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`, - * and cycles are represented with `"<cycle>"`. - * - * @param sb the string builder to which elements are appended. - * @param start the starting string. - * @param sep the separator string. 
- * @param end the ending string. - * @return the string builder `b` to which elements were appended. - */ - override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { - force - addStringNoForce(sb.underlying, start, sep, end) - sb - } - - private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = { - b.append(start) - if (nonEmpty) { - b.append(head) - var cursor = this - def appendCursorElement(): Unit = b.append(sep).append(cursor.head) - if (tailDefined) { // If tailDefined, also !isEmpty - var scout = tail - if (cursor ne scout) { - cursor = scout - if (scout.tailDefined) { - scout = scout.tail - // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings - while ((cursor ne scout) && scout.tailDefined) { - appendCursorElement() - cursor = cursor.tail - scout = scout.tail - if (scout.tailDefined) scout = scout.tail - } - } - } - if (!scout.tailDefined) { // Not a cycle, scout hit an end - while (cursor ne scout) { - appendCursorElement() - cursor = cursor.tail - } - if (cursor.nonEmpty) { - appendCursorElement() - } - } - else { - // Cycle. - // If we have a prefix of length P followed by a cycle of length C, - // the scout will be at position (P%C) in the cycle when the cursor - // enters it at P. They'll then collide when the scout advances another - // C - (P%C) ahead of the cursor. - // If we run the scout P farther, then it will be at the start of - // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner - // starts at the beginning of the prefix, they'll collide exactly at - // the start of the loop. - var runner = this - var k = 0 - while (runner ne scout) { - runner = runner.tail - scout = scout.tail - k += 1 - } - // Now runner and scout are at the beginning of the cycle. Advance - // cursor, adding to string, until it hits; then we'll have covered - // everything once. 
If cursor is already at beginning, we'd better - // advance one first unless runner didn't go anywhere (in which case - // we've already looped once). - if ((cursor eq scout) && (k > 0)) { - appendCursorElement() - cursor = cursor.tail - } - while (cursor ne scout) { - appendCursorElement() - cursor = cursor.tail - } - } - } - if (cursor.nonEmpty) { - // Either undefined or cyclic; we can check with tailDefined - if (!cursor.tailDefined) b.append(sep).append("") - else b.append(sep).append("") - } - } - b.append(end) - } - - /** - * @return a string representation of this collection. Undefined elements are - * represented with `"_"`, an undefined tail is represented with `"<not computed>"`, - * and cycles are represented with `"<cycle>"` - * - * Examples: - * - * - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been - * evaluated ; - * - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements, - * the second one has been evaluated ; - * - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains - * a cycle at the fourth element. - */ - override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString - - @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") - override def hasDefiniteSize: Boolean = isEmpty || { - if (!tailDefined) false - else { - // Two-iterator trick (2x & 1x speed) for cycle detection. 
- var those = this - var these = tail - while (those ne these) { - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (these.isEmpty) return true - if (!these.tailDefined) return false - these = these.tail - if (those eq these) return false - those = those.tail - } - false // Cycle detected - } - } -} - -@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0") -@SerialVersionUID(3L) -object Stream extends SeqFactory[Stream] { - - /* !!! #11997 This `object cons` must be defined lexically *before* `class Cons` below. - * Otherwise it prevents Scala.js from building on Windows. - */ - /** An alternative way of building and matching Streams using Stream.cons(hd, tl). - */ - object cons { - /** A stream consisting of a given first element and remaining elements - * @param hd The first element of the result stream - * @param tl The remaining elements of the result stream - */ - def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl) - - /** Maps a stream to its head and tail */ - def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs) - } - - //@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling - object Empty extends Stream[Nothing] { - override def isEmpty: Boolean = true - override def head: Nothing = throw new NoSuchElementException("head of empty stream") - override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream") - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. 
- */ - def force: this.type = this - override def knownSize: Int = 0 - protected def tailDefined: Boolean = false - } - - @SerialVersionUID(3L) - final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] { - override def isEmpty: Boolean = false - @volatile private[this] var tlVal: Stream[A] = _ - @volatile private[this] var tlGen = () => tl - protected def tailDefined: Boolean = tlGen eq null - override def tail: Stream[A] = { - if (!tailDefined) - synchronized { - if (!tailDefined) { - tlVal = tlGen() - tlGen = null - } - } - tlVal - } - - /** Forces evaluation of the whole `Stream` and returns it. - * - * @note Often we use `Stream`s to represent an infinite set or series. If - * that's the case for your particular `Stream` then this function will never - * return and will probably crash the VM with an `OutOfMemory` exception. - * This function will not hang on a finite cycle, however. - * - * @return The fully realized `Stream`. - */ - def force: this.type = { - // Use standard 2x 1x iterator trick for cycle detection ("those" is slow one) - var these, those: Stream[A] = this - if (!these.isEmpty) these = these.tail - while (those ne these) { - if (these.isEmpty) return this - these = these.tail - if (these.isEmpty) return this - these = these.tail - if (these eq those) return this - those = those.tail - } - this - } - - } - - implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l) - - final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal { - /** Construct a Stream consisting of a given first element followed by elements - * from another Stream. - */ - def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l()) - /** Construct a Stream consisting of the concatenation of the given Stream and - * another Stream. 
- */ - def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l() - } - - object #:: { - def unapply[A](s: Stream[A]): Option[(A, Stream[A])] = - if (s.nonEmpty) Some((s.head, s.tail)) else None - } - - def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match { - case coll: Stream[A] => coll - case _ => fromIterator(coll.iterator) - } - - /** - * @return A `Stream[A]` that gets its elements from the given `Iterator`. - * - * @param it Source iterator - * @tparam A type of elements - */ - // Note that the resulting `Stream` will be effectively iterable more than once because - // `Stream` memoizes its elements - def fromIterator[A](it: Iterator[A]): Stream[A] = - if (it.hasNext) { - new Stream.Cons(it.next(), fromIterator(it)) - } else Stream.Empty - - def empty[A]: Stream[A] = Empty - - override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array)) - - private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] = - new WithFilter[A](l, p) - - private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] { - private[this] var s = l // set to null to allow GC after filtered - private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter - def map[B](f: A => B): Stream[B] = filtered.map(f) - def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f) - def foreach[U](f: A => U): Unit = filtered.foreach(f) - def withFilter(q: A => Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q) - } - - /** An infinite Stream that repeatedly applies a given function to a start value. 
- * - * @param start the start value of the Stream - * @param f the function that's repeatedly applied - * @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...` - */ - def iterate[A](start: A)(f: A => A): Stream[A] = { - cons(start, iterate(f(start))(f)) - } - - /** - * Create an infinite Stream starting at `start` and incrementing by - * step `step`. - * - * @param start the start value of the Stream - * @param step the increment value of the Stream - * @return the Stream starting at value `start`. - */ - def from(start: Int, step: Int): Stream[Int] = - cons(start, from(start + step, step)) - - /** - * Create an infinite Stream starting at `start` and incrementing by `1`. - * - * @param start the start value of the Stream - * @return the Stream starting at value `start`. - */ - def from(start: Int): Stream[Int] = from(start, 1) - - /** - * Create an infinite Stream containing the given element expression (which - * is computed for each occurrence). - * - * @param elem the element composing the resulting Stream - * @return the Stream containing an infinite number of elem - */ - def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem)) - - - private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = { - cons(stream.head, stream.tail.filterImpl(p, isFlipped)) - } - - private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = { - cons(head, stream.tail.collect(pf)) - } - - /** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells. - * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses - * standard Java serialization to store the complete structure of unevaluated thunks. 
This allows the serialization - * of long evaluated streams without exhausting the stack through recursive serialization of cons cells. - */ - @SerialVersionUID(3L) - class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable { - - private[this] def writeObject(out: ObjectOutputStream): Unit = { - out.defaultWriteObject() - var these = coll - while(these.nonEmpty && these.tailDefined) { - out.writeObject(these.head) - these = these.tail - } - out.writeObject(SerializeEnd) - out.writeObject(these) - } - - private[this] def readObject(in: ObjectInputStream): Unit = { - in.defaultReadObject() - val init = new ArrayBuffer[A] - var initRead = false - while (!initRead) in.readObject match { - case SerializeEnd => initRead = true - case a => init += a.asInstanceOf[A] - } - val tail = in.readObject().asInstanceOf[Stream[A]] - coll = (init ++: tail) - } - - protected[this] def readResolve(): Any = coll - } -} diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala index db5192edc36c..b1e4622971fb 100644 --- a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala +++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala @@ -13,6 +13,8 @@ package scala package collection package immutable +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** * Trait that overrides operations to take advantage of strict builders. 
@@ -23,11 +25,11 @@ trait StrictOptimizedSeqOps[+A, +CC[_], +C] with collection.StrictOptimizedSeqOps[A, CC, C] with StrictOptimizedIterableOps[A, CC, C] { - override def distinctBy[B](f: A => B): C = { + override def distinctBy[B](f: A -> B): C = { if (lengthCompare(1) <= 0) coll else { val builder = newSpecificBuilder - val seen = mutable.HashSet.empty[B] + val seen = mutable.HashSet.empty[B @uncheckedCaptures] val it = this.iterator var different = false while (it.hasNext) { @@ -57,7 +59,7 @@ trait StrictOptimizedSeqOps[+A, +CC[_], +C] b.result() } - override def patch[B >: A](from: Int, other: IterableOnce[B], replaced: Int): CC[B] = { + override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = { val b = iterableFactory.newBuilder[B] var i = 0 val it = iterator diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala index a51c7b9e7bf6..ff01ad7806ec 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala @@ -20,6 +20,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.immutable.{RedBlackTree => RB} import scala.collection.mutable.ReusableBuilder import scala.runtime.AbstractFunction2 +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An immutable SortedMap whose values are stored in a red-black tree. 
* @@ -138,7 +140,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] = newMapOrSelf(RB.update(tree, key, value, overwrite = true)) - override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = + override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] = newMapOrSelf(that match { case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => RB.union(tree, tm.tree) @@ -158,7 +160,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va adder.finalTree }) - override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match { + override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match { case ts: TreeSet[K] if ordering == ts.ordering => newMapOrSelf(RB.difference(tree, ts.tree)) case _ => super.removedAll(keys) @@ -269,7 +271,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va private final class Adder[B1 >: V] extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] { - private var currentMutableTree: RB.Tree[K,B1] = tree0 + private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0 def finalTree = beforePublish(currentMutableTree) override def apply(kv: (K, B1)): Unit = { currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2) @@ -299,7 +301,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap() - def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] = + def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] = it match { case tm: TreeMap[K, V] if ordering == tm.ordering => tm case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering => @@ -320,7 +322,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { extends RB.MapHelper[K, V] with ReusableBuilder[(K, V), TreeMap[K, V]] 
{ type Tree = RB.Tree[K, V] - private var tree:Tree = null + private var tree:Tree @uncheckedCaptures = null def addOne(elem: (K, V)): this.type = { tree = mutableUpd(tree, elem._1, elem._2) @@ -329,7 +331,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { private object adder extends AbstractFunction2[K, V, Unit] { // we cache tree to avoid the outer access to tree // in the hot path (apply) - private[this] var accumulator :Tree = null + private[this] var accumulator: Tree @uncheckedCaptures = null def addForEach(hasForEach: collection.Map[K, V]): Unit = { accumulator = tree hasForEach.foreachEntry(this) @@ -343,7 +345,7 @@ object TreeMap extends SortedMapFactory[TreeMap] { } } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { xs match { // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= // for the moment we have to force immutability before the union diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala index 80bafb1cf3be..91233669e5ca 100644 --- a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements an immutable map that preserves order using * a hash map for the key to value mapping to provide efficient lookup, @@ -204,7 +206,7 @@ final class TreeSeqMap[K, +V] private ( new TreeSeqMap(ong, mng, ordinal, orderedBy) } else { // Populate with builder otherwise - val bdr = newBuilder[K, V](orderedBy) + val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy) val iter = ordering.iterator var i = 0 while (i < f) { @@ -222,7 +224,7 @@ final class TreeSeqMap[K, +V] private 
( } override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -233,8 +235,8 @@ final class TreeSeqMap[K, +V] private ( bdr.result() } - override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = { + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -249,7 +251,7 @@ final class TreeSeqMap[K, +V] private ( } override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = { - val bdr = newBuilder[K2, V2](orderedBy) + val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy) val iter = ordering.iterator while (iter.hasNext) { val k = iter.next() @@ -259,7 +261,7 @@ final class TreeSeqMap[K, +V] private ( bdr.result() } - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): TreeSeqMap[K, V2] = { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = { var ong: Ordering[K] = ordering var mng: Mapping[K, V2] = mapping var ord = increment(ordinal) @@ -302,7 +304,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { else EmptyByInsertion }.asInstanceOf[TreeSeqMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): TreeSeqMap[K, V] = + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] = it match { case om: TreeSeqMap[K, V] => om case _ => (newBuilder[K, V] ++= it).result() @@ -310,10 +312,10 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1 - def newBuilder[K, V]: 
mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) - def newBuilder[K, V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) + def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion) + def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy) - final class Builder[K, V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { + final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] { private[this] val bdr = new MapBuilderImpl[K, (Int, V)] private[this] var ong = Ordering.empty[K] private[this] var ord = 0 @@ -435,7 +437,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n" } - final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { + final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] { def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]] else Bin[S](prefix, mask, left, right) @@ -607,7 +609,7 @@ object TreeSeqMap extends MapFactory[TreeSeqMap] { } final def splitAt(n: Int): (Ordering[T], Ordering[T]) = { - var rear = Ordering.empty[T] + var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T] var i = n (modifyOrRemove { (o, v) => i -= 1 diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala index f0be91b72acc..c4241b818c38 100644 --- 
a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala +++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala @@ -19,7 +19,8 @@ import scala.collection.generic.DefaultSerializable import scala.collection.mutable.ReusableBuilder import scala.collection.immutable.{RedBlackTree => RB} import scala.runtime.AbstractFunction1 - +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable sorted sets using a tree. * @@ -239,7 +240,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { def empty[A: Ordering]: TreeSet[A] = new TreeSet[A] - def from[E](it: scala.collection.IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = it match { case ts: TreeSet[E] if ordering == ts.ordering => ts case ss: scala.collection.SortedSet[E] if ordering == ss.ordering => @@ -262,14 +263,14 @@ object TreeSet extends SortedIterableFactory[TreeSet] { extends RB.SetHelper[A] with ReusableBuilder[A, TreeSet[A]] { type Tree = RB.Tree[A, Any] - private [this] var tree:RB.Tree[A, Any] = null + private [this] var tree:RB.Tree[A @uncheckedCaptures, Any] = null override def addOne(elem: A): this.type = { tree = mutableUpd(tree, elem) this } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { xs match { // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++= // for the moment we have to force immutability before the union diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala index aa3fac5acd69..d9d33add512d 100644 --- a/tests/pos-special/stdlib/collection/immutable/Vector.scala +++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala @@ -24,6 +24,8 @@ import scala.collection.generic.DefaultSerializable import 
scala.collection.immutable.VectorInline._ import scala.collection.immutable.VectorStatics._ import scala.collection.mutable.ReusableBuilder +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** $factoryInfo @@ -35,7 +37,7 @@ object Vector extends StrictOptimizedSeqFactory[Vector] { def empty[A]: Vector[A] = Vector0 - def from[E](it: collection.IterableOnce[E]): Vector[E] = + def from[E](it: collection.IterableOnce[E]^): Vector[E] = it match { case v: Vector[E] => v case _ => @@ -191,21 +193,21 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem) override def appended[B >: A](elem: B): Vector[B] = super.appended(elem) override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem) - override def prependedAll[B >: A](prefix: collection.IterableOnce[B]): Vector[B] = { + override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = { val k = prefix.knownSize if (k == 0) this else if (k < 0) super.prependedAll(prefix) else prependedAll0(prefix, k) } - override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]): Vector[B] = { + override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = { val k = suffix.knownSize if (k == 0) this else if (k < 0) super.appendedAll(suffix) else appendedAll0(suffix, k) } - protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = { + protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = { // k >= 0, k = prefix.knownSize val tinyAppendLimit = 4 + vectorSliceCount if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) { @@ -223,11 +225,11 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va } else super.prependedAll(prefix) } - protected[this] def appendedAll0[B >: 
A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { // k >= 0, k = suffix.knownSize val tinyAppendLimit = 4 + vectorSliceCount if (k < tinyAppendLimit) { - var v: Vector[B] = this + var v: Vector[B @uncheckedCaptures] = this suffix match { case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x)) case _ => suffix.iterator.foreach(x => v = v.appended(x)) @@ -263,7 +265,7 @@ sealed abstract class Vector[+A] private[immutable] (private[immutable] final va /** Length of all slices up to and including index */ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len) override def toVector: Vector[A] = this @@ -369,10 +371,10 @@ private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) { } } - override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this]def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = Vector.from(prefix) - override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this]def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = Vector.from(suffix) override protected[this] def ioob(index: Int): IndexOutOfBoundsException = @@ -423,13 +425,13 @@ private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) { protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1 protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length - override protected[this] def prependedAll0[B >: A](prefix: 
collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case data1b => new Vector1(data1b) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val data1b = append1IfSpace(prefix1, suffix) if(data1b ne null) new Vector1(data1b) else super.appendedAll0(suffix, k) @@ -518,7 +520,7 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int case 2 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -529,7 +531,7 @@ private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -640,7 +642,7 @@ private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int case 4 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, 
k) case prefix1b => @@ -652,7 +654,7 @@ private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -783,7 +785,7 @@ private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int case 6 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -796,7 +798,7 @@ private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -947,7 +949,7 @@ private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int case 8 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -961,7 +963,7 @@ private final class Vector5[+A](_prefix1: Arr1, 
private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -1132,7 +1134,7 @@ private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int case 10 => length0 } - override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B], k: Int): Vector[B] = + override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = prepend1IfSpace(prefix1, prefix) match { case null => super.prependedAll0(prefix, k) case prefix1b => @@ -1147,7 +1149,7 @@ private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int ) } - override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B], k: Int): Vector[B] = { + override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = { val suffix1b = append1IfSpace(suffix1, suffix) if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length) else super.appendedAll0(suffix, k) @@ -1814,7 +1816,7 @@ final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] { this } - override def addAll(xs: IterableOnce[A]): this.type = xs match { + override def addAll(xs: IterableOnce[A]^): this.type = xs match { case v: Vector[_] => if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v) else addVector(v.asInstanceOf[Vector[A]]) @@ -2183,7 +2185,7 @@ private object VectorStatics { ac.asInstanceOf[Array[T]] } - final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs 
match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-prefix1.length) <= 0) { it.size match { @@ -2206,7 +2208,7 @@ private object VectorStatics { } else null } - final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]): Arr1 = xs match { + final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match { case it: Iterable[_] => if(it.sizeCompare(WIDTH-suffix1.length) <= 0) { it.size match { @@ -2391,7 +2393,7 @@ private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLe take(_until) } - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val xsLen = xs.length val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len) var copied = 0 @@ -2466,7 +2468,7 @@ private class LongVectorStepper(it: NewVectorIterator[Long]) // The following definitions are needed for binary compatibility with ParVector private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] { - private[immutable] var it: NewVectorIterator[A @uncheckedVariance] = _ + private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _ def hasNext: Boolean = it.hasNext def next(): A = it.next() private[collection] def remainingElementCount: Int = it.size diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala index cd8cf06c5c68..0860a0b47f28 100644 --- a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala +++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala @@ -15,6 +15,8 @@ package collection package immutable import scala.annotation.tailrec +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** This class implements immutable maps using a vector/map-based data structure, which preserves insertion 
order. * @@ -58,7 +60,7 @@ final class VectorMap[K, +V] private ( } } - override def withDefault[V1 >: V](d: K => V1): Map[K, V1] = + override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault(this, d) override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = @@ -229,7 +231,7 @@ object VectorMap extends MapFactory[VectorMap] { def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]] - def from[K, V](it: collection.IterableOnce[(K, V)]): VectorMap[K, V] = + def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] = it match { case vm: VectorMap[K, V] => vm case _ => (newBuilder[K, V] ++= it).result() @@ -241,7 +243,7 @@ object VectorMap extends MapFactory[VectorMap] { private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] { private[this] val vectorBuilder = new VectorBuilder[K] private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)] - private[this] var aliased: VectorMap[K, V] = _ + private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private override def clear(): Unit = { vectorBuilder.clear() diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala index f2fdb8e3c32e..47fe769c81ef 100644 --- a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala +++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala @@ -17,6 +17,7 @@ import scala.Predef.{wrapString => _, assert} import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl.CharStringStepper import scala.collection.mutable.{Builder, StringBuilder} +import language.experimental.captureChecking /** * This class serves as a wrapper augmenting `String`s with all the operations @@ -34,11 +35,12 @@ import scala.collection.mutable.{Builder, StringBuilder} @SerialVersionUID(3L) final class WrappedString(private val self: String) 
extends AbstractSeq[Char] with IndexedSeq[Char] with IndexedSeqOps[Char, IndexedSeq, WrappedString] - with Serializable { + with Serializable + with Pure { def apply(i: Int): Char = self.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll) + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder override def empty: WrappedString = WrappedString.empty @@ -65,13 +67,13 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi r.asInstanceOf[S with EfficientSplit] } - override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean = + override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean = that match { case s: WrappedString => self.startsWith(s.self, offset) case _ => super.startsWith(that, offset) } - override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean = + override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean = that match { case s: WrappedString => self.endsWith(s.self) case _ => super.endsWith(that) @@ -88,7 +90,7 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi case _ => super.lastIndexOf(elem, end) } - override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int = + override def copyToArray[sealed B >: Char](xs: Array[B], start: Int, len: Int): Int = (xs: Any) match { case chs: Array[Char] => val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len) @@ -97,13 +99,13 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi case _ => super.copyToArray(xs, start, len) } - override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] = + override def appendedAll[B >: Char](suffix: 
IterableOnce[B]^): IndexedSeq[B] = suffix match { case s: WrappedString => new WrappedString(self concat s.self) case _ => super.appendedAll(suffix) } - override def sameElements[B >: Char](o: IterableOnce[B]) = o match { + override def sameElements[B >: Char](o: IterableOnce[B]^) = o match { case s: WrappedString => self == s.self case _ => super.sameElements(o) } @@ -123,7 +125,7 @@ final class WrappedString(private val self: String) extends AbstractSeq[Char] wi */ @SerialVersionUID(3L) object WrappedString extends SpecificIterableFactory[Char, WrappedString] { - def fromSpecific(it: IterableOnce[Char]): WrappedString = { + def fromSpecific(it: IterableOnce[Char]^): WrappedString = { val b = newBuilder val s = it.knownSize if(s >= 0) b.sizeHint(s) diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala index 8458429727e8..985ef22859be 100644 --- a/tests/pos-special/stdlib/collection/immutable/package.scala +++ b/tests/pos-special/stdlib/collection/immutable/package.scala @@ -11,7 +11,7 @@ */ package scala.collection - +import language.experimental.captureChecking package object immutable { type StringOps = scala.collection.StringOps diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala index c02a10770696..a6413649e219 100644 --- a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala @@ -17,6 +17,8 @@ package mutable import scala.annotation.nowarn import scala.collection.generic.DefaultSerializationProxy import scala.language.implicitConversions +import language.experimental.captureChecking + /** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing. * @@ -41,7 +43,7 @@ import scala.language.implicitConversions * rapidly as 2^30^ is approached. 
* */ -class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) +class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean) extends AbstractMap[K, V] with MapOps[K, V, Map, AnyRefMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]] @@ -51,7 +53,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi def this() = this(AnyRefMap.exceptionDefault, 16, true) /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */ - def this(defaultEntry: K => V) = this(defaultEntry, 16, true) + def this(defaultEntry: K -> V) = this(defaultEntry, 16, true) /** Creates a new `AnyRefMap` with an initial buffer of specified size. * @@ -61,7 +63,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true) /** Creates a new `AnyRefMap` with specified default values and initial buffer size. 
*/ - def this(defaultEntry: K => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) private[this] var mask = 0 private[this] var _size = 0 @@ -87,7 +89,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz } - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): AnyRefMap[K,V] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = { var sz = coll.knownSize if(sz < 0) sz = 4 val arm = new AnyRefMap[K, V](sz * 2) @@ -393,24 +395,24 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) + override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { + override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { val m = this + elem1 + elem2 if(elems.isEmpty) m else m.concat(elems) } - override def concat[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = { + override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = { val arm = clone().asInstanceOf[AnyRefMap[K, V2]] xs.iterator.foreach(kv => arm += kv) arm } - override def ++[V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]): AnyRefMap[K, V2] = concat(xs) + override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = 
concat(xs) @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") - override def updated[V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = + override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] = clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value) private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = { @@ -435,7 +437,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi * Unlike `mapValues`, this method generates a new * collection immediately. */ - def mapValuesNow[V1](f: V => V1): AnyRefMap[K, V1] = { + def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = { val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false) val hz = java.util.Arrays.copyOf(_hashes, _hashes.length) val kz = java.util.Arrays.copyOf(_keys, _keys.length) @@ -476,11 +478,11 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) - def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = AnyRefMap.from(new View.Map(this, f)) - def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = AnyRefMap.from(new View.FlatMap(this, f)) - def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = + def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) override def clear(): Unit = { @@ -504,7 +506,7 @@ 
object AnyRefMap { private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - private class ExceptionDefault extends (Any => Nothing) with Serializable { + private class ExceptionDefault extends (Any -> Nothing) with Serializable { def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString) } private val exceptionDefault = new ExceptionDefault @@ -513,7 +515,7 @@ object AnyRefMap { * * This builder can be reused to create multiple instances. */ - final class AnyRefMapBuilder[K <: AnyRef, V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { + final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] { private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V] def addOne(entry: (K, V)): this.type = { elems += entry @@ -525,11 +527,11 @@ object AnyRefMap { } /** Creates a new `AnyRefMap` with zero or more key/value pairs. */ - def apply[K <: AnyRef, V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) + def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems) - def newBuilder[K <: AnyRef, V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] + def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V] - private def buildFromIterableOnce[K <: AnyRef, V](elems: IterableOnce[(K, V)]): AnyRefMap[K, V] = { + private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = { var sz = elems.knownSize if(sz < 0) sz = 4 val arm = new AnyRefMap[K, V](sz * 2) @@ -539,10 +541,10 @@ object AnyRefMap { } /** Creates a new empty `AnyRefMap`. 
*/ - def empty[K <: AnyRef, V]: AnyRefMap[K, V] = new AnyRefMap[K, V] + def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V] /** Creates a new empty `AnyRefMap` with the supplied default */ - def withDefault[K <: AnyRef, V](default: K => V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) + def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default) /** Creates a new `AnyRefMap` from an existing source collection. A source collection * which is already an `AnyRefMap` gets cloned. @@ -552,7 +554,7 @@ object AnyRefMap { * @tparam V the type of the values * @return a new `AnyRefMap` with the elements of `source` */ - def from[K <: AnyRef, V](source: IterableOnce[(K, V)]): AnyRefMap[K, V] = source match { + def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match { case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]] case _ => buildFromIterableOnce(source) } @@ -560,7 +562,7 @@ object AnyRefMap { /** Creates a new `AnyRefMap` from arrays of keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. */ - def fromZip[K <: AnyRef, V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { + def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = { val sz = math.min(keys.length, values.length) val arm = new AnyRefMap[K, V](sz * 2) var i = 0 @@ -572,7 +574,7 @@ object AnyRefMap { /** Creates a new `AnyRefMap` from keys and values. * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`. 
*/ - def fromZip[K <: AnyRef, V](keys: Iterable[K], values: Iterable[V]): AnyRefMap[K, V] = { + def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = { val sz = math.min(keys.size, values.size) val arm = new AnyRefMap[K, V](sz * 2) val ki = keys.iterator @@ -582,20 +584,20 @@ object AnyRefMap { arm } - implicit def toFactory[K <: AnyRef, V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] + implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]] @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) + def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it) def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef] } - implicit def toBuildFrom[K <: AnyRef, V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] + implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]] private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]) = AnyRefMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it) def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef] } - implicit def iterableFactory[K <: AnyRef, V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) - implicit def buildFromAnyRefMap[K <: 
AnyRef, V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) + implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this) + implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this) } diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala index e3ddeb71ef8e..8fa1e6edd566 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala @@ -20,6 +20,8 @@ import scala.annotation.nowarn import scala.annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** An implementation of the `Buffer` class using an array to * represent the assembled sequence internally. Append, update and random @@ -40,7 +42,7 @@ import scala.collection.generic.DefaultSerializable * @define willNotTerminateInf */ @SerialVersionUID(-1582447879429021880L) -class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) +class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] with IndexedBuffer[A] with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]] @@ -151,7 +153,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) } // Overridden to use array copying for efficiency where possible. 
- override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems match { case elems: ArrayBuffer[_] => val elemsLength = elems.size0 @@ -180,7 +182,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) this } - def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]): Unit = { + def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = { checkWithinBounds(index, index) elems match { case elems: collection.Iterable[A] => @@ -234,12 +236,12 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0") @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0") - @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo] = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) + @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f) @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "ArrayBuffer" - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -256,7 +258,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { if (length > 1) { mutationCount += 1 - scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length) } this } @@ -291,7 +293,7 @@ object ArrayBuffer 
extends StrictOptimizedSeqFactory[ArrayBuffer] { final val DefaultInitialSize = 16 private[this] val emptyArray = new Array[AnyRef](0) - def from[B](coll: collection.IterableOnce[B]): ArrayBuffer[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = { val k = coll.knownSize if (k >= 0) { // Avoid reallocation of buffer if length is known @@ -303,12 +305,12 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { else new ArrayBuffer[B] ++= coll } - def newBuilder[A]: Builder[A, ArrayBuffer[A]] = + def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] = new GrowableBuilder[A, ArrayBuffer[A]](empty) { override def sizeHint(size: Int): Unit = elems.ensureSize(size) } - def empty[A]: ArrayBuffer[A] = new ArrayBuffer[A]() + def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]() /** * @param arrayLen the length of the backing array @@ -357,22 +359,23 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } // TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` -final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) - extends AbstractIndexedSeqView[A] { +final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int) + extends AbstractIndexedSeqView[A], Pure { + /* Removed since it poses problems for capture checking @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") def this(array: Array[AnyRef], length: Int) = { // this won't actually track mutation, but it would be a pain to have the implementation // check if we have a method to get the current mutation count or not on every method and // change what it does based on that. hopefully no one ever calls this. 
this({ - val _array = array + val _array: Array[Object] = array val _length = length new ArrayBuffer[A](0) { this.array = _array this.size0 = _length - } + }: ArrayBuffer[A] }, () => 0) - } + }*/ @deprecated("never intended to be public", since = "2.13.7") def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] @@ -392,10 +395,10 @@ final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], muta override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala index 454527bcdebd..0620d3d23061 100644 --- 
a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala @@ -13,6 +13,7 @@ package scala.collection package mutable +import language.experimental.captureChecking import scala.reflect.ClassTag /** A builder class for arrays. @@ -20,7 +21,7 @@ import scala.reflect.ClassTag * @tparam T the type of the elements for the builder. */ @SerialVersionUID(3L) -sealed abstract class ArrayBuilder[T] +sealed abstract class ArrayBuilder[sealed T] extends ReusableBuilder[T, Array[T]] with Serializable { protected[this] var capacity: Int = 0 @@ -57,7 +58,7 @@ sealed abstract class ArrayBuilder[T] this } - override def addAll(xs: IterableOnce[T]): this.type = { + override def addAll(xs: IterableOnce[T]^): this.type = { val k = xs.knownSize if (k > 0) { ensureSize(this.size + k) @@ -493,7 +494,7 @@ object ArrayBuilder { this } - override def addAll(xs: IterableOnce[Unit]): this.type = { + override def addAll(xs: IterableOnce[Unit]^): this.type = { size += xs.iterator.size this } diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala index 205e1607f824..f22aacec65c5 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala @@ -18,6 +18,7 @@ import scala.annotation.nowarn import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.reflect.ClassTag +import language.experimental.captureChecking /** An implementation of a double-ended queue that internally uses a resizable circular buffer. 
* @@ -36,7 +37,7 @@ import scala.reflect.ClassTag * @define mayNotTerminateInf * @define willNotTerminateInf */ -class ArrayDeque[A] protected ( +class ArrayDeque[sealed A] protected ( protected var array: Array[AnyRef], private[ArrayDeque] var start: Int, private[ArrayDeque] var end: Int @@ -99,7 +100,7 @@ class ArrayDeque[A] protected ( this } - override def prependAll(elems: IterableOnce[A]): this.type = { + override def prependAll(elems: IterableOnce[A]^): this.type = { val it = elems.iterator if (it.nonEmpty) { val n = length @@ -130,7 +131,7 @@ class ArrayDeque[A] protected ( this } - override def addAll(elems: IterableOnce[A]): this.type = { + override def addAll(elems: IterableOnce[A]^): this.type = { elems.knownSize match { case srcLength if srcLength > 0 => ensureSize(srcLength + length) @@ -176,7 +177,7 @@ class ArrayDeque[A] protected ( } } - def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { requireBounds(idx, length+1) val n = length if (idx == 0) { @@ -462,7 +463,7 @@ class ArrayDeque[A] protected ( protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] = new ArrayDeque[A](array, start = 0, end) - override def copyToArray[B >: A](dest: Array[B], destStart: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len) if (copied > 0) { copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len) @@ -470,7 +471,7 @@ class ArrayDeque[A] protected ( copied } - override def toArray[B >: A: ClassTag]: Array[B] = + override def toArray[sealed B >: A: ClassTag]: Array[B] = copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length) /** @@ -525,7 +526,7 @@ class ArrayDeque[A] protected ( @SerialVersionUID(3L) object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { - def 
from[B](coll: collection.IterableOnce[B]): ArrayDeque[B] = { + def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = { val s = coll.knownSize if (s >= 0) { val array = alloc(s) @@ -535,14 +536,14 @@ object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] { } else new ArrayDeque[B]() ++= coll } - def newBuilder[A]: Builder[A, ArrayDeque[A]] = + def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] = new GrowableBuilder[A, ArrayDeque[A]](empty) { override def sizeHint(size: Int): Unit = { elems.ensureSize(size) } } - def empty[A]: ArrayDeque[A] = new ArrayDeque[A]() + def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]() final val DefaultInitialSize = 16 diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala index 74ab6b2107e5..bd3a208a94c0 100644 --- a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala @@ -19,6 +19,8 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.convert.impl._ import scala.reflect.ClassTag import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking +import annotation.unchecked.uncheckedCaptures /** * A collection representing `Array[T]`. 
Unlike `ArrayBuffer` it is always backed by the same @@ -34,23 +36,25 @@ import scala.util.hashing.MurmurHash3 * @define willNotTerminateInf */ @SerialVersionUID(3L) -sealed abstract class ArraySeq[T] +sealed abstract class ArraySeq[sealed T] extends AbstractSeq[T] with IndexedSeq[T] with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] - with Serializable { + with Serializable + with Pure { override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged - override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] val s = coll.knownSize if(s > 0) b.sizeHint(s) b ++= coll ArraySeq.make(b.result()) } - override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = + ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive @@ -71,9 +75,9 @@ sealed abstract class ArraySeq[T] override protected[this] def className = "ArraySeq" /** Clones this object, including the underlying Array. 
*/ - override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]]) - override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if(copied > 0) { Array.copy(array, 0, xs, start, copied) @@ -89,10 +93,10 @@ sealed abstract class ArraySeq[T] } override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = - ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]]) this } } @@ -107,9 +111,9 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] - def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it)) - def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) /** * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type @@ -123,7 +127,7 @@ object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws 
a `ClassCastException` * at runtime. */ - def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { case null => null case x: Array[AnyRef] => new ofRef[AnyRef](x) case x: Array[Int] => new ofInt(x) diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala index 69ecc122c1f9..dcb8a157389b 100644 --- a/tests/pos-special/stdlib/collection/mutable/BitSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala @@ -17,6 +17,7 @@ package mutable import scala.collection.immutable.Range import BitSetOps.{LogWL, MaxSize} import scala.annotation.implicitNotFound +import language.experimental.captureChecking /** * A class for mutable bitsets. @@ -47,7 +48,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) def this() = this(0) - override protected def fromSpecific(coll: IterableOnce[Int]): BitSet = bitSetFactory.fromSpecific(coll) + override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll) override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder override def empty: BitSet = bitSetFactory.empty @@ -187,7 +188,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] = super.zip(that) - override def addAll(xs: IterableOnce[Int]): this.type = xs match { + override def addAll(xs: IterableOnce[Int]^): this.type = xs match { case bs: collection.BitSet => this |= bs case range: Range => @@ -260,7 +261,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) super.subsetOf(other) } - override def subtractAll(xs: IterableOnce[Int]): this.type = xs match { + override def subtractAll(xs: IterableOnce[Int]^): 
this.type = xs match { case bs: collection.BitSet => this &~= bs case other => super.subtractAll(other) } @@ -360,7 +361,7 @@ class BitSet(protected[collection] final var elems: Array[Long]) @SerialVersionUID(3L) object BitSet extends SpecificIterableFactory[Int, BitSet] { - def fromSpecific(it: scala.collection.IterableOnce[Int]): BitSet = Growable.from(empty, it) + def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it) def empty: BitSet = new BitSet() diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala index 847b924735ce..0f472dc9ac82 100644 --- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala @@ -14,10 +14,12 @@ package scala.collection package mutable import scala.annotation.nowarn +import language.experimental.captureChecking +import scala.annotation.unchecked.uncheckedCaptures /** A `Buffer` is a growable and shrinkable `Seq`. */ -trait Buffer[A] +trait Buffer[sealed A] extends Seq[A] with SeqOps[A, Buffer, Buffer[A]] with Growable[A] @@ -48,19 +50,19 @@ trait Buffer[A] /** Appends the elements contained in a iterable object to this buffer. * @param xs the iterable object containing the elements to append. 
*/ - @`inline` final def appendAll(xs: IterableOnce[A]): this.type = addAll(xs) + @`inline` final def appendAll(xs: IterableOnce[A]^): this.type = addAll(xs) /** Alias for `prepend` */ @`inline` final def +=: (elem: A): this.type = prepend(elem) - def prependAll(elems: IterableOnce[A]): this.type = { insertAll(0, elems); this } + def prependAll(elems: IterableOnce[A]^): this.type = { insertAll(0, elems); this } @deprecated("Use prependAll instead", "2.13.0") @`inline` final def prepend(elems: A*): this.type = prependAll(elems) /** Alias for `prependAll` */ - @inline final def ++=:(elems: IterableOnce[A]): this.type = prependAll(elems) + @inline final def ++=:(elems: IterableOnce[A]^): this.type = prependAll(elems) /** Inserts a new element at a given index into this buffer. * @@ -81,7 +83,7 @@ trait Buffer[A] * @throws IndexOutOfBoundsException if `idx` is out of bounds. */ @throws[IndexOutOfBoundsException] - def insertAll(idx: Int, elems: IterableOnce[A]): Unit + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit /** Removes the element at a given index position. * @@ -103,7 +105,7 @@ trait Buffer[A] @throws[IndexOutOfBoundsException] @throws[IllegalArgumentException] def remove(idx: Int, count: Int): Unit - + /** Removes a single element from this buffer, at its first occurrence. * If the buffer does not contain that element, it is unchanged. 
* @@ -132,7 +134,7 @@ trait Buffer[A] @deprecated("use dropRightInPlace instead", since = "2.13.4") def trimEnd(n: Int): Unit = dropRightInPlace(n) - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type // +=, ++=, clear inherited from Growable // Per remark of @ichoran, we should preferably not have these: @@ -180,11 +182,11 @@ trait IndexedBuffer[A] extends IndexedSeq[A] override def iterableFactory: SeqFactory[IndexedBuffer] = IndexedBuffer - def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { // There's scope for a better implementation which copies elements in place. var i = 0 val s = size - val newElems = new Array[IterableOnce[A]](s) + val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s) while (i < s) { newElems(i) = f(this(i)); i += 1 } clear() i = 0 @@ -207,7 +209,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A] if (i == j) this else takeInPlace(j) } - def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: scala.collection.IterableOnce[A]^, replaced: Int): this.type = { val replaced0 = math.min(math.max(replaced, 0), length) val i = math.min(math.max(from, 0), length) var j = 0 diff --git a/tests/pos-special/stdlib/collection/mutable/Builder.scala b/tests/pos-special/stdlib/collection/mutable/Builder.scala index 0ecc06dff061..dd57cb75da91 100644 --- a/tests/pos-special/stdlib/collection/mutable/Builder.scala +++ b/tests/pos-special/stdlib/collection/mutable/Builder.scala @@ -12,6 +12,9 @@ package scala.collection.mutable +import language.experimental.captureChecking + + /** Base trait for collection builders. 
* * After calling `result()` the behavior of a Builder (which is not also a [[scala.collection.mutable.ReusableBuilder]]) @@ -20,7 +23,8 @@ package scala.collection.mutable * * @see [[scala.collection.mutable.ReusableBuilder]] for Builders which can be reused after calling `result()` */ -trait Builder[-A, +To] extends Growable[A] { self => +trait Builder[-A, +To] extends Growable[A] { + self: Builder[A, To]^ => /** Clears the contents of this builder. * After execution of this method the builder will contain no elements. @@ -51,7 +55,7 @@ trait Builder[-A, +To] extends Growable[A] { self => * @param coll the collection which serves as a hint for the result's size. * @param delta a correction to add to the `coll.size` to produce the size hint. */ - final def sizeHint(coll: scala.collection.IterableOnce[_], delta: Int = 0): Unit = { + final def sizeHint(coll: scala.collection.IterableOnce[_]^, delta: Int = 0): Unit = { val s = coll.knownSize if (s != -1) sizeHint(s + delta) } @@ -69,7 +73,7 @@ trait Builder[-A, +To] extends Growable[A] { self => * than collection's size are reduced. */ // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility - final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { + final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]^): Unit = { val s = boundingColl.knownSize if (s != -1) { sizeHint(scala.math.min(s, size)) @@ -77,10 +81,10 @@ trait Builder[-A, +To] extends Growable[A] { self => } /** A builder resulting from this builder my mapping the result using `f`. 
*/ - def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo] = new Builder[A, NewTo] { + def mapResult[NewTo](f: To => NewTo): Builder[A, NewTo]^{this, f} = new Builder[A, NewTo] { def addOne(x: A): this.type = { self += x; this } def clear(): Unit = self.clear() - override def addAll(xs: IterableOnce[A]): this.type = { self ++= xs; this } + override def addAll(xs: IterableOnce[A]^): this.type = { self ++= xs; this } override def sizeHint(size: Int): Unit = self.sizeHint(size) def result(): NewTo = f(self.result()) override def knownSize: Int = self.knownSize diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala index b9598904375d..152b6cc9ffc7 100644 --- a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala +++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala @@ -13,34 +13,37 @@ package scala package collection package mutable +import language.experimental.captureChecking private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + this: CheckedIndexedSeqView[A]^ => + protected val mutationCount: () => Int - override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) - override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) - - override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) - override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) - override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) - override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) - override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) - override 
def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) - override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) - override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) - override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) - override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) - - override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) - override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) + override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A]^{this} = new 
CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) } private[mutable] object CheckedIndexedSeqView { import IndexedSeqView.SomeIndexedSeqOps @SerialVersionUID(3L) - private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) extends IndexedSeqView.IndexedSeqViewIterator[A](self) { private[this] val expectedCount = mutationCount override def hasNext: Boolean = { @@ -50,7 +53,7 @@ private[mutable] object CheckedIndexedSeqView { } @SerialVersionUID(3L) - private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int) extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { private[this] val expectedCount = mutationCount override def hasNext: Boolean = { @@ -60,43 +63,43 @@ private[mutable] object 
CheckedIndexedSeqView { } @SerialVersionUID(3L) - class Id[+A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int) extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class 
Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int) extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] @SerialVersionUID(3L) - class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int) extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] @SerialVersionUID(3L) - class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int) extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { override def reverse: IndexedSeqView[A] = underlying match { case x: IndexedSeqView[A] => x @@ -105,7 +108,7 @@ private[mutable] object CheckedIndexedSeqView { } @SerialVersionUID(3L) - class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int) extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { protected val lo = from max 0 protected val hi = (until max 0) min underlying.length diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala index 940ecf3549ad..39149e98cbf0 100644 --- a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala @@ -11,7 +11,7 @@ */ package scala.collection.mutable - +import 
language.experimental.captureChecking /** A trait for cloneable collections. * diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala index 8542b5b56a01..2b27efb6eac1 100644 --- a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala @@ -18,6 +18,7 @@ import scala.annotation.{implicitNotFound, tailrec, unused} import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializationProxy import scala.runtime.Statics +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable with red-black trees in the buckets for good * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality @@ -32,7 +33,7 @@ import scala.runtime.Statics * @define mayNotTerminateInf * @define willNotTerminateInf */ -final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) +final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) extends AbstractMap[K, V] with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] @@ -63,7 +64,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) - override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) override protected def newSpecificBuilder: Builder[(K, V), 
CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] @@ -173,7 +174,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { val k = xs.knownSize if(k > 0) sizeHint(contentSize + k) super.addAll(xs) @@ -442,13 +443,13 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = sortedMapFactory.from(new View.Collect(this, pf)) - override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) /** Alias for `concat` */ - @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = @@ -743,17 +744,17 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
- def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it } - def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] - def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -765,8 +766,8 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), 
CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) } @@ -788,7 +789,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { /////////////////////////// Red-Black Tree Node - final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { @@ -819,17 +820,17 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { } } - @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + @`inline` private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = new RBNode(key, hash, value, red, null, null, parent) - @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = if (node.left eq null) node else minNodeNonNull(node.left) /** * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, * therefore, the last node), this method returns `null`. 
*/ - private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = { if (node.right ne null) minNodeNonNull(node.right) else { var x = node @@ -842,7 +843,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { } } - private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) def hasNext: Boolean = nextNode ne null @@ -858,7 +859,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { /////////////////////////// Linked List Node - private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { override def toString = s"LLNode($key, $value, $hash) -> $next" private[this] def eq(a: Any, b: Any): Boolean = diff --git a/tests/pos-special/stdlib/collection/mutable/Growable.scala b/tests/pos-special/stdlib/collection/mutable/Growable.scala index 914742b9013a..3b5eabac37bf 100644 --- a/tests/pos-special/stdlib/collection/mutable/Growable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Growable.scala @@ -14,6 +14,8 @@ package scala package collection package mutable +import language.experimental.captureChecking + /** This trait forms part of collections that can be augmented * using a `+=` operator and that can be cleared of all elements using * a `clear` method. @@ -54,7 +56,7 @@ trait Growable[-A] extends Clearable { * @param xs the IterableOnce producing the elements to $add. * @return the $coll itself. 
*/ - def addAll(xs: IterableOnce[A]): this.type = { + def addAll(xs: IterableOnce[A]^): this.type = { if (xs.asInstanceOf[AnyRef] eq this) addAll(Buffer.from(xs)) // avoid mutating under our own iterator else { val it = xs.iterator @@ -66,7 +68,7 @@ trait Growable[-A] extends Clearable { } /** Alias for `addAll` */ - @`inline` final def ++= (xs: IterableOnce[A]): this.type = addAll(xs) + @`inline` final def ++= (xs: IterableOnce[A]^): this.type = addAll(xs) /** @return The number of elements in the collection under construction, if it can be cheaply computed, * -1 otherwise. The default implementation always returns -1. @@ -83,7 +85,7 @@ object Growable { * @tparam A Element type * @return The filled instance */ - def from[A](empty: Growable[A], it: collection.IterableOnce[A]): empty.type = empty ++= it + def from[A](empty: Growable[A], it: collection.IterableOnce[A]^): empty.type = empty ++= it } diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala index 7e945dffb99e..4d6f989e6f3d 100644 --- a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala @@ -12,7 +12,7 @@ package scala package collection.mutable - +import language.experimental.captureChecking /** The canonical builder for collections that are growable, i.e. that support an * efficient `+=` method which adds an element to the collection. 
@@ -31,7 +31,7 @@ class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To) def addOne(elem: Elem): this.type = { elems += elem; this } - override def addAll(xs: IterableOnce[Elem]): this.type = { elems.addAll(xs); this } + override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this } override def knownSize: Int = elems.knownSize } diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala index 7ad3cf3869e8..ab45e7ffc73d 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala @@ -17,6 +17,7 @@ import scala.annotation.{nowarn, tailrec} import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializationProxy import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable. * @@ -32,7 +33,7 @@ import scala.util.hashing.MurmurHash3 * @define willNotTerminateInf */ @deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0") -class HashMap[K, V](initialCapacity: Int, loadFactor: Double) +class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double) extends AbstractMap[K, V] with MapOps[K, V, HashMap, HashMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]] @@ -94,7 +95,7 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) if(target > table.length) growTable(target) } - override def addAll(xs: IterableOnce[(K, V)]): this.type = { + override def addAll(xs: IterableOnce[(K, V)]^): this.type = { sizeHint(xs.knownSize) xs match { @@ -182,7 +183,7 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) } } - override def subtractAll(xs: IterableOnce[K]): this.type = { + override def subtractAll(xs: IterableOnce[K]^): this.type = { if (size == 0) { 
return this } @@ -596,17 +597,17 @@ class HashMap[K, V](initialCapacity: Int, loadFactor: Double) @SerialVersionUID(3L) object HashMap extends MapFactory[HashMap] { - def empty[K, V]: HashMap[K, V] = new HashMap[K, V] + def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]): HashMap[K, V] = { + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new HashMap[K, V](cap, defaultLoadFactor).addAll(it) } - def newBuilder[K, V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = + def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] = new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -618,8 +619,8 @@ object HashMap extends MapFactory[HashMap] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { - def fromSpecific(it: IterableOnce[(K, V)]): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) + private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it) def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor) } 
diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala index 425721a41626..e8c055ff15ef 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala @@ -17,6 +17,7 @@ import scala.annotation.tailrec import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializationProxy import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable sets using a hashtable. * @@ -28,7 +29,7 @@ import scala.util.hashing.MurmurHash3 * @define mayNotTerminateInf * @define willNotTerminateInf */ -final class HashSet[A](initialCapacity: Int, loadFactor: Double) +final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double) extends AbstractSet[A] with SetOps[A, HashSet, HashSet[A]] with StrictOptimizedIterableOps[A, HashSet, HashSet[A]] @@ -90,7 +91,7 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) addElem(elem, computeHash(elem)) } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { sizeHint(xs.knownSize) xs match { case hs: immutable.HashSet[A] => @@ -114,7 +115,7 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) } } - override def subtractAll(xs: IterableOnce[A]): this.type = { + override def subtractAll(xs: IterableOnce[A]^): this.type = { if (size == 0) { return this } @@ -406,17 +407,17 @@ final class HashSet[A](initialCapacity: Int, loadFactor: Double) @SerialVersionUID(3L) object HashSet extends IterableFactory[HashSet] { - def from[B](it: scala.collection.IterableOnce[B]): HashSet[B] = { + def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = { val k = it.knownSize val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity new HashSet[B](cap, defaultLoadFactor) ++= it } - def empty[A]: 
HashSet[A] = new HashSet[A] + def empty[sealed A]: HashSet[A] = new HashSet[A] - def newBuilder[A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + def newBuilder[sealed A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = + def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] = new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) { override def sizeHint(size: Int) = elems.sizeHint(size) } @@ -428,8 +429,8 @@ object HashSet extends IterableFactory[HashSet] { final def defaultInitialCapacity: Int = 16 @SerialVersionUID(3L) - private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { - def fromSpecific(it: IterableOnce[A]): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it + private final class DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor) } diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala index 4153bd532163..a3534e322cf3 100644 --- a/tests/pos-special/stdlib/collection/mutable/HashTable.scala +++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala @@ -19,6 +19,7 @@ import java.lang.Integer.{numberOfLeadingZeros, rotateRight} import scala.util.hashing.byteswap32 import java.lang.Integer +import language.experimental.captureChecking /** This class can be used to construct data structures that are based * on hashtables. 
Class `HashTable[A]` implements a hashtable @@ -36,7 +37,7 @@ import java.lang.Integer * @tparam A type of the elements contained in this hash table. */ // Not used in the standard library, but used in scala-parallel-collections -private[collection] trait HashTable[A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { +private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] { // Replacing Entry type parameter by abstract type member here allows to not expose to public // implementation-specific entry classes such as `DefaultEntry` or `LinkedEntry`. // However, I'm afraid it's too late now for such breaking change. @@ -411,7 +412,7 @@ private[collection] object HashTable { /** Class used internally. */ -private[collection] trait HashEntry[A, E <: HashEntry[A, E]] { +private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] { val key: A var next: E = _ } diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala index c801f073fb0d..1af98162e9f3 100644 --- a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala index 24d54905de22..022970b4c56f 100644 --- a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala +++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala @@ -12,6 +12,7 @@ package scala.collection package mutable +import language.experimental.captureChecking trait IndexedSeq[T] extends Seq[T] with scala.collection.IndexedSeq[T] diff --git a/tests/pos-special/stdlib/collection/mutable/Iterable.scala 
b/tests/pos-special/stdlib/collection/mutable/Iterable.scala index d05aeed88044..bf286157b376 100644 --- a/tests/pos-special/stdlib/collection/mutable/Iterable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Iterable.scala @@ -13,11 +13,13 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults} +import language.experimental.captureChecking trait Iterable[A] extends collection.Iterable[A] with collection.IterableOps[A, Iterable, Iterable[A]] with IterableFactoryDefaults[A, Iterable] { + this: Iterable[A]^ => override def iterableFactory: IterableFactory[Iterable] = Iterable } @@ -31,4 +33,5 @@ trait Iterable[A] object Iterable extends IterableFactory.Delegate[Iterable](ArrayBuffer) /** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ -abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A] +abstract class AbstractIterable[A] extends scala.collection.AbstractIterable[A] with Iterable[A]: + this: AbstractIterable[A]^ => diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala index bc663f1d37d8..a253e8738b26 100644 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala @@ -17,6 +17,7 @@ package mutable import scala.annotation.{nowarn, tailrec} import scala.collection.generic.DefaultSerializable import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable maps using a hashtable. 
@@ -33,7 +34,7 @@ import scala.util.hashing.MurmurHash3 * @define orderDependentFold */ @deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11") -class LinkedHashMap[K, V] +class LinkedHashMap[sealed K, sealed V] extends AbstractMap[K, V] with SeqMap[K, V] with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] @@ -476,20 +477,20 @@ class LinkedHashMap[K, V] @SerialVersionUID(3L) object LinkedHashMap extends MapFactory[LinkedHashMap] { - def empty[K, V] = new LinkedHashMap[K, V] + def empty[sealed K, sealed V] = new LinkedHashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]) = { + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = { val newlhm = empty[K, V] newlhm.sizeHint(it.knownSize) newlhm.addAll(it) newlhm } - def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V]) /** Class for the linked hash map entry, used internally. */ - private[mutable] final class LinkedEntry[K, V](val key: K, val hash: Int, var value: V) { + private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) { var earlier: LinkedEntry[K, V] = null var later: LinkedEntry[K, V] = null var next: LinkedEntry[K, V] = null diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala index 0c01f8ea79ea..a895034a852c 100644 --- a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala @@ -17,6 +17,7 @@ package mutable import scala.annotation.{nowarn, tailrec} import scala.collection.generic.DefaultSerializable import scala.util.hashing.MurmurHash3 +import language.experimental.captureChecking /** This class implements mutable sets using a hashtable. 
* The iterator and all traversal methods of this class visit elements in the order they were inserted. @@ -31,7 +32,7 @@ import scala.util.hashing.MurmurHash3 * @define orderDependentFold */ @deprecatedInheritance("LinkedHashSet will be made final", "2.13.11") -class LinkedHashSet[A] +class LinkedHashSet[sealed A] extends AbstractSet[A] with SetOps[A, LinkedHashSet, LinkedHashSet[A]] with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] @@ -314,20 +315,20 @@ class LinkedHashSet[A] @SerialVersionUID(3L) object LinkedHashSet extends IterableFactory[LinkedHashSet] { - override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A] - def from[E](it: collection.IterableOnce[E]) = { + def from[sealed E](it: collection.IterableOnce[E]^) = { val newlhs = empty[E] newlhs.sizeHint(it.knownSize) newlhs.addAll(it) newlhs } - def newBuilder[A] = new GrowableBuilder(empty[A]) + def newBuilder[sealed A] = new GrowableBuilder(empty[A]) /** Class for the linked hash set entry, used internally. */ - private[mutable] final class Entry[A](val key: A, val hash: Int) { + private[mutable] final class Entry[sealed A](val key: A, val hash: Int) { var earlier: Entry[A] = null var later: Entry[A] = null var next: Entry[A] = null diff --git a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala index d66525763163..4f607c770130 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/ListBuffer.scala @@ -19,6 +19,8 @@ import java.lang.{IllegalArgumentException, IndexOutOfBoundsException} import scala.collection.generic.DefaultSerializable import scala.runtime.Statics.releaseFence +import scala.annotation.unchecked.uncheckedCaptures +import language.experimental.captureChecking /** A `Buffer` implementation backed by a list. It provides constant time * prepend and append. 
Most other operations are linear. @@ -36,7 +38,7 @@ import scala.runtime.Statics.releaseFence * @define willNotTerminateInf */ @SerialVersionUID(-8428291952499836345L) -class ListBuffer[A] +class ListBuffer[sealed A] extends AbstractBuffer[A] with SeqOps[A, ListBuffer, ListBuffer[A]] with StrictOptimizedSeqOps[A, ListBuffer, ListBuffer[A]] @@ -121,7 +123,7 @@ class ListBuffer[A] } // MUST only be called on fresh instances - private def freshFrom(xs: IterableOnce[A]): this.type = { + private def freshFrom(xs: IterableOnce[A]^): this.type = { val it = xs.iterator if (it.hasNext) { var len = 1 @@ -140,7 +142,7 @@ class ListBuffer[A] this } - override final def addAll(xs: IterableOnce[A]): this.type = { + override final def addAll(xs: IterableOnce[A]^): this.type = { val it = xs.iterator if (it.hasNext) { val fresh = new ListBuffer[A].freshFrom(it) @@ -248,7 +250,7 @@ class ListBuffer[A] } } - def insertAll(idx: Int, elems: IterableOnce[A]): Unit = { + def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = { if (idx < 0 || idx > len) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${len-1})") val it = elems.iterator if (it.hasNext) { @@ -305,7 +307,7 @@ class ListBuffer[A] this } - def flatMapInPlace(f: A => IterableOnce[A]): this.type = { + def flatMapInPlace(f: A => IterableOnce[A]^): this.type = { mutationCount += 1 var src = first var dst: List[A] = null @@ -345,7 +347,7 @@ class ListBuffer[A] this } - def patchInPlace(from: Int, patch: collection.IterableOnce[A], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[A]^, replaced: Int): this.type = { val _len = len val _from = math.max(from, 0) // normalized val _replaced = math.max(replaced, 0) // normalized @@ -395,9 +397,9 @@ class ListBuffer[A] @SerialVersionUID(3L) object ListBuffer extends StrictOptimizedSeqFactory[ListBuffer] { - def from[A](coll: collection.IterableOnce[A]): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) + def from[sealed 
A](coll: collection.IterableOnce[A]^): ListBuffer[A] = new ListBuffer[A].freshFrom(coll) - def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) + def newBuilder[sealed A]: Builder[A, ListBuffer[A]] = new GrowableBuilder(empty[A]) - def empty[A]: ListBuffer[A] = new ListBuffer[A] + def empty[A]: ListBuffer[A] = new ListBuffer[A @uncheckedCaptures] } diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala index 7cc5aa227757..8ddbc264e47b 100644 --- a/tests/pos-special/stdlib/collection/mutable/ListMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala @@ -16,6 +16,7 @@ package mutable import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable import scala.collection.immutable.List +import language.experimental.captureChecking /** A simple mutable map backed by a list, so it preserves insertion order. * @@ -30,7 +31,7 @@ import scala.collection.immutable.List * @define orderDependentFold */ @deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") -class ListMap[K, V] +class ListMap[sealed K, sealed V] extends AbstractMap[K, V] with MapOps[K, V, ListMap, ListMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]] @@ -76,7 +77,7 @@ class ListMap[K, V] @SerialVersionUID(3L) @deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0") object ListMap extends MapFactory[ListMap] { - def empty[K, V]: ListMap[K, V] = new ListMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): ListMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[K, V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V]) + def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V] + def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), ListMap[K,V]] 
= new GrowableBuilder(empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala index af34ca4ab8c9..2c757160ec77 100644 --- a/tests/pos-special/stdlib/collection/mutable/LongMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala @@ -15,6 +15,7 @@ package mutable import scala.collection.generic.DefaultSerializationProxy import scala.language.implicitConversions +import language.experimental.captureChecking /** This class implements mutable maps with `Long` keys based on a hash table with open addressing. * @@ -36,7 +37,7 @@ import scala.language.implicitConversions * rapidly as 2^30 is approached. * */ -final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) +final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean) extends AbstractMap[Long, V] with MapOps[Long, V, Map, LongMap[V]] with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]] @@ -46,7 +47,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff def this() = this(LongMap.exceptionDefault, 16, true) // TODO: override clear() with an optimization more tailored for efficiency. - override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]): LongMap[V] = { + override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = { //TODO should this be the default implementation of this method in StrictOptimizedIterableOps? val b = newSpecificBuilder b.sizeHint(coll) @@ -56,7 +57,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V]) /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. 
*/ - def this(defaultEntry: Long => V) = this(defaultEntry, 16, true) + def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true) /** Creates a new `LongMap` with an initial buffer of specified size. * @@ -66,7 +67,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true) /** Creates a new `LongMap` with specified default values and initial buffer size. */ - def this(defaultEntry: Long => V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) + def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true) private[this] var mask = 0 private[this] var extraKeys: Int = 0 @@ -468,18 +469,18 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff } @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { + override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = { val m = this + elem1 + elem2 if(elems.isEmpty) m else m.concat(elems) } - override def concat[V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = { + override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = { val lm = clone().asInstanceOf[LongMap[V1]] xs.iterator.foreach(kv => lm += kv) lm } - override def ++ [V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]): LongMap[V1] = concat(xs) + override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs) @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0") override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] = @@ -519,7 +520,7 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff * Unlike 
`mapValues`, this method generates a new * collection immediately. */ - def mapValuesNow[V1](f: V => V1): LongMap[V1] = { + def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = { val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false) @@ -562,11 +563,11 @@ final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBuff this } - def map[V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) + def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f)) - def flatMap[V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) + def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f)) - def collect[V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = + def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] = strictOptimizedCollect(LongMap.newBuilder[V2], pf) protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this) @@ -580,13 +581,13 @@ object LongMap { private final val VacantBit = 0x40000000 private final val MissVacant = 0xC0000000 - private val exceptionDefault: Long => Nothing = (k: Long) => throw new NoSuchElementException(k.toString) + private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString) /** A builder for instances of `LongMap`. * * This builder can be reused to create multiple instances. 
*/ - final class LongMapBuilder[V] extends ReusableBuilder[(Long, V), LongMap[V]] { + final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] { private[collection] var elems: LongMap[V] = new LongMap[V] override def addOne(entry: (Long, V)): this.type = { elems += entry @@ -598,9 +599,9 @@ object LongMap { } /** Creates a new `LongMap` with zero or more key/value pairs. */ - def apply[V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) + def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems) - private def buildFromIterableOnce[V](elems: IterableOnce[(Long, V)]): LongMap[V] = { + private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = { var sz = elems.knownSize if(sz < 0) sz = 4 val lm = new LongMap[V](sz * 2) @@ -610,10 +611,10 @@ object LongMap { } /** Creates a new empty `LongMap`. */ - def empty[V]: LongMap[V] = new LongMap[V] + def empty[sealed V]: LongMap[V] = new LongMap[V] /** Creates a new empty `LongMap` with the supplied default */ - def withDefault[V](default: Long => V): LongMap[V] = new LongMap[V](default) + def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default) /** Creates a new `LongMap` from an existing source collection. A source collection * which is already a `LongMap` gets cloned. 
@@ -622,17 +623,17 @@ object LongMap { * @tparam A the type of the collection’s elements * @return a new `LongMap` with the elements of `source` */ - def from[V](source: IterableOnce[(Long, V)]): LongMap[V] = source match { + def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match { case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]] case _ => buildFromIterableOnce(source) } - def newBuilder[V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] + def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V] /** Creates a new `LongMap` from arrays of keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. */ - def fromZip[V](keys: Array[Long], values: Array[V]): LongMap[V] = { + def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = { val sz = math.min(keys.length, values.length) val lm = new LongMap[V](sz * 2) var i = 0 @@ -644,7 +645,7 @@ object LongMap { /** Creates a new `LongMap` from keys and values. * Equivalent to but more efficient than `LongMap((keys zip values): _*)`. 
*/ - def fromZip[V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { + def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = { val sz = math.min(keys.size, values.size) val lm = new LongMap[V](sz * 2) val ki = keys.iterator @@ -654,20 +655,20 @@ object LongMap { lm } - implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] + implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]] @SerialVersionUID(3L) private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable { - def fromSpecific(it: IterableOnce[(Long, AnyRef)]): LongMap[AnyRef] = LongMap.from[AnyRef](it) + def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it) def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef] } implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]] private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] { - def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]) = LongMap.from(it) + def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it) def newBuilder(from: Any) = LongMap.newBuilder[AnyRef] } - implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this) + implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this) implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this) } diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala index 610dc01029cc..dab64ddb1f58 100644 --- 
a/tests/pos-special/stdlib/collection/mutable/Map.scala +++ b/tests/pos-special/stdlib/collection/mutable/Map.scala @@ -14,6 +14,8 @@ package scala package collection package mutable +import language.experimental.captureChecking + /** Base type of mutable Maps */ trait Map[K, V] extends Iterable[(K, V)] @@ -44,7 +46,7 @@ trait Map[K, V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - def withDefault(d: K => V): Map[K, V] = new Map.WithDefault[K, V](this, d) + def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. @@ -68,7 +70,8 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] with Cloneable[C] with Builder[(K, V), C] with Growable[(K, V)] - with Shrinkable[K] { + with Shrinkable[K] + with Pure { def result(): C = coll @@ -231,7 +234,7 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] object Map extends MapFactory.Delegate[Map](HashMap) { @SerialVersionUID(3L) - class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K => V) + class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V) extends AbstractMap[K, V] with MapOps[K, V, Map, WithDefault[K, V]] with Serializable { @@ -250,12 +253,12 @@ object Map extends MapFactory.Delegate[Map](HashMap) { def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this } - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): Map[K, V2] = + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] = underlying.concat(suffix).withDefault(defaultValue) override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override protected def fromSpecific(coll: 
scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = new WithDefault[K, V](mapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala index 13d7c35e0165..281631c92298 100644 --- a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala @@ -12,6 +12,7 @@ package scala.collection.mutable +import language.experimental.captureChecking /** A trait for mutable maps with multiple values assigned to a key. * @@ -51,7 +52,7 @@ package scala.collection.mutable * @define Coll `MultiMap` */ @deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0") -trait MultiMap[K, V] extends Map[K, Set[V]] { +trait MultiMap[K, sealed V] extends Map[K, Set[V]] { /** Creates a new set. 
* * Classes that use this trait as a mixin can override this method diff --git a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala index e98536d0dad5..3e9b16540031 100644 --- a/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala +++ b/tests/pos-special/stdlib/collection/mutable/MutationTracker.scala @@ -15,6 +15,7 @@ package collection package mutable import java.util.ConcurrentModificationException +import language.experimental.captureChecking /** * Utilities to check that mutations to a client that tracks @@ -66,7 +67,7 @@ private object MutationTracker { * @param mutationCount a by-name provider of the current mutation count * @tparam A the type of the iterator's elements */ - final class CheckedIterator[A](underlying: Iterator[A], mutationCount: => Int) extends AbstractIterator[A] { + final class CheckedIterator[A](underlying: Iterator[A]^, mutationCount: => Int) extends AbstractIterator[A] { private[this] val expectedCount = mutationCount def hasNext: Boolean = { diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala index 22e99d4650d1..f1deb25b6a8a 100644 --- a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala @@ -16,6 +16,7 @@ package mutable import java.lang.Integer.numberOfLeadingZeros import java.util.ConcurrentModificationException import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking /** * @define Coll `OpenHashMap` @@ -25,10 +26,10 @@ import scala.collection.generic.DefaultSerializable @SerialVersionUID(3L) object OpenHashMap extends MapFactory[OpenHashMap] { - def empty[K, V] = new OpenHashMap[K, V] - def from[K, V](it: IterableOnce[(K, V)]): OpenHashMap[K,V] = empty ++= it + def empty[sealed K, sealed V] = new OpenHashMap[K, V] + def from[sealed K, 
sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it - def newBuilder[K, V]: Builder[(K, V), OpenHashMap[K,V]] = + def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] = new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty) /** A hash table entry. @@ -38,7 +39,7 @@ object OpenHashMap extends MapFactory[OpenHashMap] { * If its `key` is not the default value of type `Key`, the entry is occupied. * If the entry is occupied, `hash` contains the hash value of `key`. */ - final private class OpenEntry[Key, Value](var key: Key, + final private class OpenEntry[sealed Key, sealed Value](var key: Key, var hash: Int, var value: Option[Value]) @@ -61,7 +62,7 @@ object OpenHashMap extends MapFactory[OpenHashMap] { * @define willNotTerminateInf */ @deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0") -class OpenHashMap[Key, Value](initialSize : Int) +class OpenHashMap[sealed Key, sealed Value](initialSize : Int) extends AbstractMap[Key, Value] with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]] with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]] diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala index 5572bdca3cf6..a395fac4a44a 100644 --- a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala +++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala @@ -15,6 +15,7 @@ package mutable import scala.collection.generic.DefaultSerializationProxy import scala.math.Ordering +import language.experimental.captureChecking /** A heap-based priority queue. 
* @@ -66,7 +67,7 @@ import scala.math.Ordering * @define mayNotTerminateInf * @define willNotTerminateInf */ -sealed class PriorityQueue[A](implicit val ord: Ordering[A]) +sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A]) extends AbstractIterable[A] with Iterable[A] with IterableOps[A, Iterable, PriorityQueue[A]] @@ -77,7 +78,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) with Serializable { - private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] { override def mapInPlace(f: A0 => A0): this.type = { var i = 1 // see "we do not use array(0)" comment below (???) val siz = this.size @@ -106,7 +107,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) override def isEmpty: Boolean = resarr.p_size0 < 2 // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) - override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll) override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder override def empty: PriorityQueue[A] = PriorityQueue.empty @@ -161,7 +162,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) this } - override def addAll(xs: IterableOnce[A]): this.type = { + override def addAll(xs: IterableOnce[A]^): this.type = { val from = resarr.p_size0 for (x <- xs.iterator) unsafeAdd(x) heapify(from) @@ -364,7 +365,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) pq } - override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = { val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) if (copied > 0) { 
Array.copy(resarr.p_array, 1, xs, start, copied) @@ -383,7 +384,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) @SerialVersionUID(3L) object PriorityQueue extends SortedIterableFactory[PriorityQueue] { - def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = { new Builder[A, PriorityQueue[A]] { val pq = new PriorityQueue[A] def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } @@ -392,9 +393,9 @@ object PriorityQueue extends SortedIterableFactory[PriorityQueue] { } } - def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] - def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = { val b = newBuilder[E] b ++= it b.result() diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala index 18cce0bd3852..a578b0742009 100644 --- a/tests/pos-special/stdlib/collection/mutable/Queue.scala +++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala @@ -15,6 +15,7 @@ package mutable import scala.annotation.nowarn import scala.collection.generic.DefaultSerializable +import language.experimental.captureChecking /** `Queue` objects implement data structures that allow to @@ -27,7 +28,7 @@ import scala.collection.generic.DefaultSerializable * @define mayNotTerminateInf * @define willNotTerminateInf */ -class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Queue, Queue[A]] with StrictOptimizedSeqOps[A, Queue, Queue[A]] @@ -129,10 +130,10 @@ class Queue[A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Queue extends StrictOptimizedSeqFactory[Queue] { - 
def from[A](source: IterableOnce[A]): Queue[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source - def empty[A]: Queue[A] = new Queue + def empty[sealed A]: Queue[A] = new Queue - def newBuilder[A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) + def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty) } diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala index 3ac0e1a1f797..1f320f832cdf 100644 --- a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala +++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala @@ -16,6 +16,7 @@ package collection.mutable import scala.annotation.tailrec import collection.{AbstractIterator, Iterator} import java.lang.String +import language.experimental.captureChecking /** * An object containing the red-black tree implementation used by mutable `TreeMaps`. @@ -31,25 +32,25 @@ private[collection] object RedBlackTree { // Therefore, while obtaining the size of the whole tree is O(1), knowing the number of entries inside a range is O(n) // on the size of the range. 
- final class Tree[A, B](var root: Node[A, B], var size: Int) { + final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) { def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size) } - final class Node[A, B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { + final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) { override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")" } object Tree { - def empty[A, B]: Tree[A, B] = new Tree(null, 0) + def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0) } object Node { - @`inline` def apply[A, B](key: A, value: B, red: Boolean, + @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean, left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] = new Node(key, value, red, left, right, parent) - @`inline` def leaf[A, B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = + @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] = new Node(key, value, red, null, null, parent) def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent)) @@ -180,7 +181,7 @@ private[collection] object RedBlackTree { // ---- insertion ---- - def insert[A, B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { + def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = { var y: Node[A, B] = null var x = tree.root var cmp = 1 @@ -476,16 +477,16 @@ private[collection] object RedBlackTree { if (node.right ne null) transformNodeNonNull(node.right, f) } - def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] = + def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = 
None, end: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start, end) - def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = + def keysIterator[sealed A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] = new KeysIterator(tree, start, end) - def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = + def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start, end) - private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A]) (implicit ord: Ordering[A]) extends AbstractIterator[R] { protected def nextResult(node: Node[A, B]): R @@ -513,19 +514,19 @@ private[collection] object RedBlackTree { setNullIfAfterEnd() } - private[this] final class EntriesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, (A, B)](tree, start, end) { def nextResult(node: Node[A, B]) = (node.key, node.value) } - private[this] final class KeysIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, A](tree, start, end) { def nextResult(node: Node[A, B]) = node.key } - private[this] final class ValuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A], end: Option[A]) + private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A]) extends TreeIterator[A, B, 
B](tree, start, end) { def nextResult(node: Node[A, B]) = node.value @@ -603,7 +604,7 @@ private[collection] object RedBlackTree { // building /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */ - def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = { + def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes def f(level: Int, size: Int): Node[A, Null] = size match { case 0 => null @@ -622,7 +623,7 @@ private[collection] object RedBlackTree { } /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */ - def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { + def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = { val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes def f(level: Int, size: Int): Node[A, B] = size match { case 0 => null @@ -642,7 +643,7 @@ private[collection] object RedBlackTree { new Tree(f(1, size), size) } - def copyTree[A, B](n: Node[A, B]): Node[A, B] = + def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] = if(n eq null) null else { val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null) if(c.left != null) c.left.parent = c diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala index d7d3b6db4f09..246e525e37d9 100644 --- a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala @@ -14,6 +14,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** `ReusableBuilder` is a marker trait that indicates that a `Builder` * can be reused to build more than one instance of a collection. 
In diff --git a/tests/pos-special/stdlib/collection/mutable/Seq.scala b/tests/pos-special/stdlib/collection/mutable/Seq.scala index e83d79987208..443eec379c1b 100644 --- a/tests/pos-special/stdlib/collection/mutable/Seq.scala +++ b/tests/pos-special/stdlib/collection/mutable/Seq.scala @@ -13,6 +13,7 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, SeqFactory} +import language.experimental.captureChecking trait Seq[A] extends Iterable[A] diff --git a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala index 67066f99e07e..5740490223b2 100644 --- a/tests/pos-special/stdlib/collection/mutable/SeqMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/SeqMap.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** * A generic trait for ordered mutable maps. Concrete classes have to provide diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala index 6530e8fedf05..01384e993e89 100644 --- a/tests/pos-special/stdlib/collection/mutable/Set.scala +++ b/tests/pos-special/stdlib/collection/mutable/Set.scala @@ -13,6 +13,7 @@ package scala.collection.mutable import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps} +import language.experimental.captureChecking /** Base trait for mutable sets */ trait Set[A] diff --git a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala index 006a3b88e49f..de2a24ecf01f 100644 --- a/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala +++ b/tests/pos-special/stdlib/collection/mutable/Shrinkable.scala @@ -14,6 +14,7 @@ package scala package collection.mutable import scala.annotation.tailrec +import language.experimental.captureChecking /** This trait forms part of collections that can be reduced * using a `-=` 
operator. @@ -52,7 +53,7 @@ trait Shrinkable[-A] { * @param xs the iterator producing the elements to remove. * @return the $coll itself */ - def subtractAll(xs: collection.IterableOnce[A]): this.type = { + def subtractAll(xs: collection.IterableOnce[A]^): this.type = { @tailrec def loop(xs: collection.LinearSeq[A]): Unit = { if (xs.nonEmpty) { subtractOne(xs.head) @@ -74,6 +75,6 @@ trait Shrinkable[-A] { } /** Alias for `subtractAll` */ - @`inline` final def --= (xs: collection.IterableOnce[A]): this.type = subtractAll(xs) + @`inline` final def --= (xs: collection.IterableOnce[A]^): this.type = subtractAll(xs) } diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala index eb2f0d231b7a..8017177f5720 100644 --- a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala @@ -14,6 +14,7 @@ package scala package collection.mutable import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults} +import language.experimental.captureChecking /** * Base type for mutable sorted map collections @@ -37,7 +38,7 @@ trait SortedMap[K, V] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - override def withDefault(d: K => V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) + override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d) /** The same map with a given default value. * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc. 
@@ -66,7 +67,7 @@ trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { @SerialVersionUID(3L) - final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K => V) + final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V) extends Map.WithDefault[K, V](underlying, defaultValue) with SortedMap[K, V] with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] @@ -91,10 +92,10 @@ object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) { override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue) - override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): SortedMap[K, V2] = + override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] = underlying.concat(suffix).withDefault(defaultValue) - override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]): WithDefault[K, V] = + override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] = new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue) override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] = diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala index 2bcb8dc7845a..e657fb749d7d 100644 --- a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala @@ -13,6 +13,7 @@ package scala package collection package mutable +import language.experimental.captureChecking /** * Base type for mutable sorted set collections diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala index 675666bc805c..4efa9621f374 100644 --- a/tests/pos-special/stdlib/collection/mutable/Stack.scala +++ 
b/tests/pos-special/stdlib/collection/mutable/Stack.scala @@ -16,6 +16,8 @@ import scala.annotation.{migration, nowarn} import scala.collection.generic.DefaultSerializable import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} +import language.experimental.captureChecking + /** A stack implements a data structure which allows to store and retrieve * objects in a last-in-first-out (LIFO) fashion. * @@ -33,7 +35,7 @@ import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, Stri * @define willNotTerminateInf */ @migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") -class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) +class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) with IndexedSeqOps[A, Stack, Stack[A]] with StrictOptimizedSeqOps[A, Stack, Stack[A]] @@ -133,10 +135,10 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) @SerialVersionUID(3L) object Stack extends StrictOptimizedSeqFactory[Stack] { - def from[A](source: IterableOnce[A]): Stack[A] = empty ++= source + def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source - def empty[A]: Stack[A] = new Stack + def empty[sealed A]: Stack[A] = new Stack - def newBuilder[A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) + def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty) } diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala index 1d8b9563e917..5320fa1dabb0 100644 --- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala +++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala @@ -14,6 +14,7 @@ package scala.collection.mutable import scala.collection.{IterableFactoryDefaults, IterableOnce} import 
scala.collection.immutable.WrappedString +import language.experimental.captureChecking import scala.Predef.{ // unimport char-related implicit conversions to avoid triggering them accidentally genericArrayOps => _, @@ -81,7 +82,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr // Methods required to make this an IndexedSeq: def apply(i: Int): Char = underlying.charAt(i) - override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): StringBuilder = + override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): StringBuilder = new StringBuilder() appendAll coll override protected def newSpecificBuilder: Builder[Char, StringBuilder] = @@ -109,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr override def toString: String = result() - override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) = + override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) = ct.runtimeClass match { case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]] case _ => super.toArray @@ -184,7 +185,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @param xs the characters to be appended. * @return this StringBuilder. */ - def appendAll(xs: IterableOnce[Char]): this.type = { + def appendAll(xs: IterableOnce[Char]^): this.type = { xs match { case x: WrappedString => underlying append x.unwrap case x: ArraySeq.ofChar => underlying append x.array @@ -313,7 +314,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr * @return this StringBuilder. * @throws StringIndexOutOfBoundsException if the index is out of bounds. 
*/ - def insertAll(index: Int, xs: IterableOnce[Char]): this.type = + def insertAll(index: Int, xs: IterableOnce[Char]^): this.type = insertAll(index, (ArrayBuilder.make[Char] ++= xs).result()) /** Inserts the given Array[Char] into this sequence at the given index. diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala index 1af968a08ac3..f714a9ed46c2 100644 --- a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala @@ -17,6 +17,7 @@ package mutable import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} +import language.experimental.captureChecking /** * A mutable sorted map implemented using a mutable red-black tree as underlying data structure. @@ -28,7 +29,7 @@ import scala.collection.mutable.{RedBlackTree => RB} * @define Coll mutable.TreeMap * @define coll mutable tree map */ -sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) +sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K]) extends AbstractMap[K, V] with SortedMap[K, V] with SortedMapOps[K, V, TreeMap, TreeMap[K, V]] @@ -247,11 +248,11 @@ sealed class TreeMap[K, V] private (tree: RB.Tree[K, V])(implicit val ordering: @SerialVersionUID(3L) object TreeMap extends SortedMapFactory[TreeMap] { - def from[K : Ordering, V](it: IterableOnce[(K, V)]): TreeMap[K, V] = + def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] = Growable.from(empty[K, V], it) - def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap[K, V]() + def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]() - def newBuilder[K: Ordering, V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) + def newBuilder[sealed K: Ordering, sealed V]: 
Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala index bed474dc02a3..9ba439bea041 100644 --- a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala +++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala @@ -17,6 +17,7 @@ import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable import scala.collection.mutable.{RedBlackTree => RB} import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable} +import language.experimental.captureChecking /** * A mutable sorted set implemented using a mutable red-black tree as underlying data structure. @@ -28,7 +29,7 @@ import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Steppe * @define coll mutable tree set */ // Original API designed in part by Lucien Pereira -sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) +sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A]) extends AbstractSet[A] with SortedSet[A] with SortedSetOps[A, TreeSet, TreeSet[A]] @@ -191,9 +192,9 @@ sealed class TreeSet[A] private (private val tree: RB.Tree[A, Null])(implicit va @SerialVersionUID(3L) object TreeSet extends SortedIterableFactory[TreeSet] { - def empty[A : Ordering]: TreeSet[A] = new TreeSet[A]() + def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]() - def from[E](it: IterableOnce[E])(implicit ordering: Ordering[E]): TreeSet[E] = + def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] = it match { case ts: TreeSet[E] if ordering == ts.ordering => new TreeSet[E](ts.tree.treeCopy()) @@ -209,7 +210,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { new TreeSet[E](t) } - 
def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { + def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] { private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this } def result(): TreeSet[A] = new TreeSet[A](tree) diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala index 489f2a1b0387..2015b76a31b8 100644 --- a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala +++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala @@ -17,6 +17,7 @@ import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable import scala.reflect.ClassTag import scala.collection.immutable.Nil +import language.experimental.captureChecking /** A buffer that stores elements in an unrolled linked list. 
* @@ -45,7 +46,7 @@ import scala.collection.immutable.Nil * */ @SerialVersionUID(3L) -sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) +sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T]) extends AbstractBuffer[T] with Buffer[T] with Seq[T] @@ -190,7 +191,7 @@ sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) def insert(idx: Int, elem: T): Unit = insertAll(idx, elem :: Nil) - def insertAll(idx: Int, elems: IterableOnce[T]): Unit = + def insertAll(idx: Int, elems: IterableOnce[T]^): Unit = if (idx >= 0 && idx <= sz) { sz += headptr.insertAll(idx, elems, this) } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})") @@ -202,7 +203,7 @@ sealed class UnrolledBuffer[T](implicit val tag: ClassTag[T]) this } - def patchInPlace(from: Int, patch: collection.IterableOnce[T], replaced: Int): this.type = { + def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = { remove(from, replaced) insertAll(from, patch) this @@ -240,11 +241,11 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self) - def empty[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] - def from[A : ClassTag](source: scala.collection.IterableOnce[A]): UnrolledBuffer[A] = newBuilder[A].addAll(source) + def from[sealed A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = newBuilder[A].addAll(source) - def newBuilder[A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] + def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A] final val waterline: Int = 50 @@ -257,7 +258,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] /** Unrolled buffer node. 
*/ - class Unrolled[T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { + class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) { private[collection] def this() = this(0, new Array[T](unrolledlength), null, null) private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b) @@ -372,7 +373,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] if (next eq null) true else false // checks if last node was thrown out } else false - @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T], buffer: UnrolledBuffer[T]): Int = { + @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = { if (idx < size) { // divide this node at the appropriate position and insert all into head // update new next @@ -436,7 +437,7 @@ object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] // This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner: // Todo -- revisit whether inheritance is the best way to achieve this functionality -private[collection] class DoublingUnrolledBuffer[T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { +private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) { override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this) } diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala index 7286a318e1f9..a9498b7fc69b 100644 --- a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala +++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala @@ 
-16,6 +16,7 @@ package mutable import scala.annotation.nowarn import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike} +import language.experimental.captureChecking /** A hash map with references to entries which are weakly reachable. Entries are * removed from this map when the key is no longer (strongly) referenced. This class wraps @@ -33,7 +34,7 @@ import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapper * @define willNotTerminateInf */ @SerialVersionUID(3L) -class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) +class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap) with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]] with MapFactoryDefaults[K, V, WeakHashMap, Iterable] { override def empty = new WeakHashMap[K, V] @@ -48,8 +49,8 @@ class WeakHashMap[K, V] extends JMapWrapper[K, V](new java.util.WeakHashMap) */ @SerialVersionUID(3L) object WeakHashMap extends MapFactory[WeakHashMap] { - def empty[K, V]: WeakHashMap[K,V] = new WeakHashMap[K, V] - def from[K, V](it: collection.IterableOnce[(K, V)]): WeakHashMap[K,V] = Growable.from(empty[K, V], it) - def newBuilder[K, V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) + def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V] + def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it) + def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V]) } diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala index 4915e8a48b22..d658ca5bc65a 100644 --- a/tests/pos-special/stdlib/collection/mutable/package.scala +++ b/tests/pos-special/stdlib/collection/mutable/package.scala @@ -11,6 +11,7 @@ */ package scala.collection +import language.experimental.captureChecking 
package object mutable { diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala index 954573ff1ddd..ad4686be1fb2 100644 --- a/tests/pos-special/stdlib/collection/package.scala +++ b/tests/pos-special/stdlib/collection/package.scala @@ -11,6 +11,7 @@ */ package scala +import language.experimental.captureChecking package object collection { @deprecated("Use Iterable instead of Traversable", "2.13.0")